diff --git a/CLOBBER b/CLOBBER index 4cef76d9e6bc..9942791dcef8 100644 --- a/CLOBBER +++ b/CLOBBER @@ -22,5 +22,5 @@ # changes to stick? As of bug 928195, this shouldn't be necessary! Please # don't change CLOBBER for WebIDL changes any more. -Bug 1003764 - Oh snap, Waldo did it again. +Bug 1014976 - Windows debug bustage from linking changes. diff --git a/accessible/src/html/HTMLListAccessible.cpp b/accessible/src/html/HTMLListAccessible.cpp index 8ef0cfb05da9..f879956d4409 100644 --- a/accessible/src/html/HTMLListAccessible.cpp +++ b/accessible/src/html/HTMLListAccessible.cpp @@ -166,9 +166,6 @@ HTMLListBulletAccessible::Name(nsString &aName) nsBlockFrame* blockFrame = do_QueryFrame(mContent->GetPrimaryFrame()); if (blockFrame) { blockFrame->GetBulletText(aName); - - // Append space otherwise bullets are jammed up against list text. - aName.Append(' '); } return eNameOK; diff --git a/accessible/tests/mochitest/text/test_hypertext.html b/accessible/tests/mochitest/text/test_hypertext.html index 98297e127507..aa86c8a6eee7 100644 --- a/accessible/tests/mochitest/text/test_hypertext.html +++ b/accessible/tests/mochitest/text/test_hypertext.html @@ -71,8 +71,8 @@ testText(IDs, 0, 1, kEmbedChar); IDs = [ "listitem" ]; - testCharacterCount(IDs, 5); - testText(IDs, 0, 5, "1.foo"); + testCharacterCount(IDs, 6); + testText(IDs, 0, 6, "1. foo"); testText(["testbr"], 0, 3, "foo"); diff --git a/accessible/tests/mochitest/text/test_lineboundary.html b/accessible/tests/mochitest/text/test_lineboundary.html index 3f152c2aa6ac..6cdf5d3fbe63 100644 --- a/accessible/tests/mochitest/text/test_lineboundary.html +++ b/accessible/tests/mochitest/text/test_lineboundary.html @@ -118,17 +118,17 @@ // list items testTextAtOffset([ "li1" ], BOUNDARY_LINE_START, - [ [ 0, 5, kDiscBulletChar + "Item", 0, 5 ] ]); + [ [ 0, 6, kDiscBulletText + "Item", 0, 6 ] ]); testTextAtOffset([ "li2" ], BOUNDARY_LINE_START, - [ [ 0, 1, kDiscBulletChar, 0, 1 ] ]); + [ [ 0, 2, kDiscBulletText, 0, 2 ] ]); testTextAtOffset([ "li3" ], BOUNDARY_LINE_START, - [ [ 0, 7, kDiscBulletChar + "a long ", 0, 8 ], - [ 8, 11, "and ", 8, 12 ] ]); + [ [ 0, 8, kDiscBulletText + "a long ", 0, 9 ], + [ 9, 12, "and ", 9, 13 ] ]); testTextAtOffset([ "li4" ], BOUNDARY_LINE_START, - [ [ 0, 6, kDiscBulletChar + "a " + kEmbedChar + " c", 0, 6 ] ]); + [ [ 0, 7, kDiscBulletText + "a " + kEmbedChar + " c", 0, 7 ] ]); testTextAtOffset([ "li5" ], BOUNDARY_LINE_START, - [ [ 0, 1, kDiscBulletChar + "\n", 0, 2 ], - [ 2, 6, "hello", 2, 7 ] ]); + [ [ 0, 2, kDiscBulletText + "\n", 0, 3 ], + [ 3, 7, "hello", 3, 8 ] ]); testTextAtOffset([ "ul1" ], BOUNDARY_LINE_START, [ [ 0, 0, kEmbedChar, 0, 1 ], [ 1, 1, kEmbedChar, 1, 2 ], @@ -137,17 +137,17 @@ [ 4, 5, kEmbedChar, 4, 5 ] ]); testTextAtOffset([ "li6" ], BOUNDARY_LINE_START, - [ [ 0, 6, "1.Item", 0, 6 ] ]); + [ [ 0, 7, "1. Item", 0, 7 ] ]); testTextAtOffset([ "li7" ], BOUNDARY_LINE_START, - [ [ 0, 2, "2.", 0, 2 ] ]); + [ [ 0, 3, "2. ", 0, 3 ] ]); testTextAtOffset([ "li8" ], BOUNDARY_LINE_START, - [ [ 0, 8, "3.a long ", 0, 9 ], - [ 9, 12, "and ", 9, 13 ] ]); + [ [ 0, 9, "3. a long ", 0, 10 ], + [ 10, 13, "and ", 10, 14 ] ]); testTextAtOffset([ "li9" ], BOUNDARY_LINE_START, - [ [ 0, 7, "4.a " + kEmbedChar + " c", 0, 7 ] ]); + [ [ 0, 8, "4. a " + kEmbedChar + " c", 0, 8 ] ]); testTextAtOffset([ "li10" ], BOUNDARY_LINE_START, - [ [ 0, 2, "5.\n", 0, 3 ], - [ 3, 7, "hello", 3, 8 ] ]); + [ [ 0, 3, "5. 
\n", 0, 4 ], + [ 4, 8, "hello", 4, 9 ] ]); testTextAtOffset([ "ol1" ], BOUNDARY_LINE_START, [ [ 0, 0, kEmbedChar, 0, 1 ], [ 1, 1, kEmbedChar, 1, 2 ], diff --git a/accessible/tests/mochitest/textattrs/test_general.html b/accessible/tests/mochitest/textattrs/test_general.html index bc1f8ac36940..a13f3ceaf625 100644 --- a/accessible/tests/mochitest/textattrs/test_general.html +++ b/accessible/tests/mochitest/textattrs/test_general.html @@ -538,9 +538,9 @@ var attrs = { "auto-generated": "true" }; - testTextAttrs(ID, 0, attrs, defAttrs, 0, 2); - testTextAttrs(ID, 2, { }, defAttrs, 2, 6); - testTextAttrs(ID, 6, attrs, defAttrs, 6, 7); + testTextAttrs(ID, 0, attrs, defAttrs, 0, 3); + testTextAttrs(ID, 3, { }, defAttrs, 3, 7); + testTextAttrs(ID, 7, attrs, defAttrs, 7, 8); ////////////////////////////////////////////////////////////////////////// // area19, "HTML5 mark tag" test diff --git a/b2g/installer/Makefile.in b/b2g/installer/Makefile.in index f4455e4e6e9c..5b9770c11b05 100644 --- a/b2g/installer/Makefile.in +++ b/b2g/installer/Makefile.in @@ -22,8 +22,10 @@ DEFINES += -DMOZ_CHILD_PROCESS_NAME=$(MOZ_CHILD_PROCESS_NAME) # Set MSVC dlls version to package, if any. ifdef WIN32_REDIST_DIR +ifdef MOZ_NO_DEBUG_RTL DEFINES += -DMOZ_MSVC_REDIST=$(_MSC_VER) endif +endif ifdef ENABLE_MARIONETTE DEFINES += -DENABLE_MARIONETTE=1 diff --git a/b2g/installer/package-manifest.in b/b2g/installer/package-manifest.in index cf2d50f6fa8d..2184e9f0d8ee 100644 --- a/b2g/installer/package-manifest.in +++ b/b2g/installer/package-manifest.in @@ -69,7 +69,6 @@ @BINPATH@/@MOZ_CHILD_PROCESS_NAME@ #endif #ifdef XP_WIN32 -#ifndef MOZ_DEBUG #if MOZ_MSVC_REDIST == 1600 @BINPATH@/msvcp100.dll @BINPATH@/msvcr100.dll @@ -81,7 +80,6 @@ @BINPATH@/msvcr120.dll #endif #endif -#endif #ifdef MOZ_SHARED_MOZGLUE @BINPATH@/@DLL_PREFIX@mozglue@DLL_SUFFIX@ #endif @@ -808,7 +806,7 @@ bin/components/@DLL_PREFIX@nkgnomevfs@DLL_SUFFIX@ #endif @BINPATH@/components/DataStore.manifest -@BINPATH@/components/DataStoreService.js +@BINPATH@/components/DataStoreImpl.js @BINPATH@/components/dom_datastore.xpt #ifdef MOZ_WEBSPEECH diff --git a/browser/config/mozconfigs/linux32/debug b/browser/config/mozconfigs/linux32/debug index 424a79d6c343..8ea940f70c3b 100644 --- a/browser/config/mozconfigs/linux32/debug +++ b/browser/config/mozconfigs/linux32/debug @@ -1,4 +1,5 @@ ac_add_options --enable-debug +ac_add_options --enable-dmd ac_add_options --enable-signmar ac_add_options --disable-unified-compilation diff --git a/browser/config/mozconfigs/linux64/debug b/browser/config/mozconfigs/linux64/debug index c637320f52f6..cdbe7f7d2b41 100644 --- a/browser/config/mozconfigs/linux64/debug +++ b/browser/config/mozconfigs/linux64/debug @@ -1,4 +1,5 @@ ac_add_options --enable-debug +ac_add_options --enable-dmd ac_add_options --enable-signmar . $topsrcdir/build/unix/mozconfig.linux diff --git a/browser/config/mozconfigs/macosx64/debug b/browser/config/mozconfigs/macosx64/debug index 92d69a679016..923a8471e277 100644 --- a/browser/config/mozconfigs/macosx64/debug +++ b/browser/config/mozconfigs/macosx64/debug @@ -1,6 +1,7 @@ . $topsrcdir/build/macosx/mozconfig.common ac_add_options --enable-debug +ac_add_options --enable-dmd ac_add_options --enable-accessibility ac_add_options --enable-signmar diff --git a/browser/config/mozconfigs/win32/debug b/browser/config/mozconfigs/win32/debug index fb762e785bfb..5b9d2c55715c 100644 --- a/browser/config/mozconfigs/win32/debug +++ b/browser/config/mozconfigs/win32/debug @@ -1,6 +1,8 @@ . 
"$topsrcdir/browser/config/mozconfigs/common" ac_add_options --enable-debug +ac_add_options --enable-dmd +ac_add_options --enable-profiling # needed for --enable-dmd to work on Windows ac_add_options --enable-signmar ac_add_options --enable-require-all-d3dc-versions diff --git a/browser/config/mozconfigs/win64/debug b/browser/config/mozconfigs/win64/debug index 6c2781224aff..bcfe6ae87143 100644 --- a/browser/config/mozconfigs/win64/debug +++ b/browser/config/mozconfigs/win64/debug @@ -4,6 +4,8 @@ ac_add_options --target=x86_64-pc-mingw32 ac_add_options --host=x86_64-pc-mingw32 ac_add_options --enable-debug +ac_add_options --enable-dmd +ac_add_options --enable-profiling # needed for --enable-dmd to work on Windows ac_add_options --enable-signmar # Needed to enable breakpad in application.ini diff --git a/browser/installer/Makefile.in b/browser/installer/Makefile.in index 3c87dab7d16b..37449ea2b95e 100644 --- a/browser/installer/Makefile.in +++ b/browser/installer/Makefile.in @@ -72,8 +72,10 @@ DEFINES += -DMOZ_CHILD_PROCESS_NAME=$(MOZ_CHILD_PROCESS_NAME) # Set MSVC dlls version to package, if any. ifdef WIN32_REDIST_DIR +ifdef MOZ_NO_DEBUG_RTL DEFINES += -DMOZ_MSVC_REDIST=$(_MSC_VER) endif +endif ifneq (,$(filter WINNT Darwin Android,$(OS_TARGET))) DEFINES += -DMOZ_SHARED_MOZGLUE=1 @@ -136,6 +138,7 @@ endif ifdef MOZ_JEMALLOC3 DEFINES += -DMOZ_JEMALLOC3 endif +DEFINES += -DMOZ_ICU_DBG_SUFFIX=$(MOZ_ICU_DBG_SUFFIX) libs:: $(MAKE) -C $(DEPTH)/browser/locales langpack diff --git a/browser/installer/package-manifest.in b/browser/installer/package-manifest.in index e6f63951bc1d..c28906c33f7a 100644 --- a/browser/installer/package-manifest.in +++ b/browser/installer/package-manifest.in @@ -85,7 +85,6 @@ #endif #ifdef XP_WIN32 @BINPATH@/plugin-hang-ui@BIN_SUFFIX@ -#ifndef MOZ_DEBUG #if MOZ_MSVC_REDIST == 1600 @BINPATH@/msvcp100.dll @BINPATH@/msvcr100.dll @@ -97,19 +96,12 @@ @BINPATH@/msvcr120.dll #endif #endif -#endif #ifndef MOZ_NATIVE_ICU #ifdef MOZ_SHARED_ICU #ifdef XP_WIN -#ifdef MOZ_DEBUG -@BINPATH@/icudtd@MOZ_ICU_VERSION@.dll -@BINPATH@/icuind@MOZ_ICU_VERSION@.dll -@BINPATH@/icuucd@MOZ_ICU_VERSION@.dll -#else -@BINPATH@/icudt@MOZ_ICU_VERSION@.dll -@BINPATH@/icuin@MOZ_ICU_VERSION@.dll -@BINPATH@/icuuc@MOZ_ICU_VERSION@.dll -#endif +@BINPATH@/icudt@MOZ_ICU_DBG_SUFFIX@@MOZ_ICU_VERSION@.dll +@BINPATH@/icuin@MOZ_ICU_DBG_SUFFIX@@MOZ_ICU_VERSION@.dll +@BINPATH@/icuuc@MOZ_ICU_DBG_SUFFIX@@MOZ_ICU_VERSION@.dll #elif defined(XP_MACOSX) @BINPATH@/libicudata.@MOZ_ICU_VERSION@.dylib @BINPATH@/libicui18n.@MOZ_ICU_VERSION@.dylib @@ -861,7 +853,7 @@ bin/libfreebl_32int64_3.so #endif @BINPATH@/components/DataStore.manifest -@BINPATH@/components/DataStoreService.js +@BINPATH@/components/DataStoreImpl.js @BINPATH@/components/dom_datastore.xpt diff --git a/build/autoconf/compiler-opts.m4 b/build/autoconf/compiler-opts.m4 index ef3f85e47080..76effb442df1 100644 --- a/build/autoconf/compiler-opts.m4 +++ b/build/autoconf/compiler-opts.m4 @@ -122,7 +122,13 @@ MOZ_ARG_ENABLE_STRING(debug, fi ], MOZ_DEBUG=) -MOZ_DEBUG_ENABLE_DEFS="-DDEBUG -D_DEBUG -DTRACING" +if test -z "$MOZ_DEBUG"; then + MOZ_NO_DEBUG_RTL=1 +fi + +AC_SUBST(MOZ_NO_DEBUG_RTL) + +MOZ_DEBUG_ENABLE_DEFS="-DDEBUG -DTRACING" MOZ_ARG_WITH_STRING(debug-label, [ --with-debug-label=LABELS Define DEBUG_ for each comma-separated diff --git a/build/autoconf/ffi.m4 b/build/autoconf/ffi.m4 index 03e7e7939b52..f2f7d51755a8 100644 --- a/build/autoconf/ffi.m4 +++ b/build/autoconf/ffi.m4 @@ -48,17 +48,21 @@ if test -z "$BUILDING_JS" -o -n "$JS_STANDALONE"; then CFLAGS= 
ac_configure_args="$ac_configure_args LD=link CPP=\"cl -nologo -EP\" \ CXXCPP=\"cl -nologo -EP\" SHELL=sh.exe" + rtl= + if test -z "$MOZ_NO_DEBUG_RTL" -a -n "$MOZ_DEBUG"; then + rtl=" -DUSE_DEBUG_RTL" + fi case "${target_cpu}" in x86_64) # Need target since MSYS tools into mozilla-build may be 32bit ac_configure_args="$ac_configure_args \ - CC=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh -m64\" \ - CXX=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh -m64\"" + CC=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh -m64$rtl\" \ + CXX=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh -m64$rtl\"" ;; *) ac_configure_args="$ac_configure_args \ - CC=$_topsrcdir/js/src/ctypes/libffi/msvcc.sh \ - CXX=$_topsrcdir/js/src/ctypes/libffi/msvcc.sh" + CC=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh$rtl\" \ + CXX=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh$rtl\"" ;; esac fi diff --git a/build/autoconf/icu.m4 b/build/autoconf/icu.m4 index 2a7f4a8476fc..b47d03357603 100644 --- a/build/autoconf/icu.m4 +++ b/build/autoconf/icu.m4 @@ -85,11 +85,11 @@ if test -n "$ENABLE_INTL_API"; then WINNT) ICU_LIB_NAMES="icuin icuuc icudt" if test -n "$MOZ_SHARED_ICU"; then - DBG_SUFFIX= - if test -n "$MOZ_DEBUG"; then - DBG_SUFFIX=d + MOZ_ICU_DBG_SUFFIX= + if test -n "$MOZ_DEBUG" -a -z "$MOZ_NO_DEBUG_RTL"; then + MOZ_ICU_DBG_SUFFIX=d fi - MOZ_ICU_LIBS='$(foreach lib,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(LIB_PREFIX)$(lib)$(DBG_SUFFIX).$(LIB_SUFFIX))' + MOZ_ICU_LIBS='$(foreach lib,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(LIB_PREFIX)$(lib)$(MOZ_ICU_DBG_SUFFIX).$(LIB_SUFFIX))' fi ;; Darwin) @@ -113,7 +113,7 @@ if test -n "$ENABLE_INTL_API"; then fi fi -AC_SUBST(DBG_SUFFIX) +AC_SUBST(MOZ_ICU_DBG_SUFFIX) AC_SUBST(ENABLE_INTL_API) AC_SUBST(ICU_LIB_NAMES) AC_SUBST(MOZ_ICU_LIBS) @@ -259,7 +259,7 @@ if test -z "$BUILDING_JS" -o -n "$JS_STANDALONE"; then # But, not debug build. ICU_CFLAGS="$ICU_CFLAGS -UDEBUG -DNDEBUG" ICU_CXXFLAGS="$ICU_CXXFLAGS -UDEBUG -DNDEBUG" - else + elif test -z "$MOZ_NO_DEBUG_RTL"; then ICU_BUILD_OPTS="$ICU_BUILD_OPTS --enable-debug" fi fi @@ -284,7 +284,7 @@ if test -z "$BUILDING_JS" -o -n "$JS_STANDALONE"; then fi # Add RTL flags for MSVCRT.DLL - if test -n "$MOZ_DEBUG"; then + if test -n "$MOZ_DEBUG" -a -z "$MOZ_NO_DEBUG_RTL"; then ICU_CFLAGS="$ICU_CFLAGS -MDd" ICU_CXXFLAGS="$ICU_CXXFLAGS -MDd" else diff --git a/build/gyp.mozbuild b/build/gyp.mozbuild index f010084e4546..b3b7dd603e17 100644 --- a/build/gyp.mozbuild +++ b/build/gyp.mozbuild @@ -27,6 +27,8 @@ gyp_vars = { 'build_libyuv': 0, 'libyuv_dir': '/media/libyuv', 'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1, + # don't use openssl + 'use_openssl': 0, # saves 4MB when webrtc_trace is off 'enable_lazy_trace_alloc': 1, diff --git a/build/win32/Makefile.in b/build/win32/Makefile.in index d725c9459f68..a9d59d278798 100644 --- a/build/win32/Makefile.in +++ b/build/win32/Makefile.in @@ -7,7 +7,6 @@ MOZ_GLUE_LDFLAGS = include $(topsrcdir)/config/rules.mk ifdef WIN32_REDIST_DIR -ifndef MOZ_DEBUG ifeq (1600,$(_MSC_VER)) REDIST_FILES = \ @@ -39,7 +38,6 @@ libs:: $(libs-preqs) install --preserve-timestamps $(foreach f,$(REDIST_FILES),'$(WIN32_REDIST_DIR)'/$(f)) $(FINAL_TARGET) endif -endif # ! 
MOZ_DEBUG endif # WIN32_REDIST_DIR # run the binscope tool to make sure the binary and all libraries diff --git a/config/msvc-stl-wrapper.template.h b/config/msvc-stl-wrapper.template.h index b22cb2206c1d..4b3b9fcfd900 100644 --- a/config/msvc-stl-wrapper.template.h +++ b/config/msvc-stl-wrapper.template.h @@ -36,13 +36,13 @@ # error "STL code can only be used with infallible ::operator new()" #endif -#ifdef DEBUG +#ifdef _DEBUG // From // http://msdn.microsoft.com/en-us/library/aa985982%28VS.80%29.aspx // and // http://msdn.microsoft.com/en-us/library/aa985965%28VS.80%29.aspx // there appear to be two types of STL container checking. The -// former is enabled by -D_DEBUG (which is implied by -DDEBUG), and +// former is enabled by -D_DEBUG (which is implied by -MDd or -MTd), and // looks to be full generation/mutation checked iterators as done by // _GLIBCXX_DEBUG. The latter appears to just be bounds checking, and // is enabled by the following macros. It appears that the _DEBUG diff --git a/config/system-headers b/config/system-headers index 6c0049395ecc..dc20eb5caf4b 100644 --- a/config/system-headers +++ b/config/system-headers @@ -347,7 +347,6 @@ commdlg.h compat.h condapi.h ConditionalMacros.h -config.h conio.h console.h ControlDefinitions.h diff --git a/configure.in b/configure.in index 0c9344acb58b..6d9f390d569f 100644 --- a/configure.in +++ b/configure.in @@ -2189,6 +2189,7 @@ ia64*-hpux*) dnl both SSSE3 and SSE4.1. HAVE_TOOLCHAIN_SUPPORT_MSSSE3=1 HAVE_TOOLCHAIN_SUPPORT_MSSE4_1=1 + MOZ_MEMORY=1 fi AC_DEFINE(HAVE_SNPRINTF) AC_DEFINE(_WINDOWS) @@ -7114,16 +7115,13 @@ else ;; *-mingw*) AC_DEFINE(MOZ_MEMORY_WINDOWS) - if test -z "$MOZ_DEBUG"; then - WIN32_CRT_LIBS="msvcrt.lib msvcprt.lib" - else - WIN32_CRT_LIBS="msvcrtd.lib msvcprtd.lib" - fi + export MOZ_NO_DEBUG_RTL=1 + WIN32_CRT_LIBS="msvcrt.lib msvcprt.lib" dnl Look for a broken crtdll.obj WIN32_CRTDLL_FULLPATH=`lib -nologo -list $WIN32_CRT_LIBS | grep crtdll\\.obj` lib -NOLOGO -OUT:crtdll.obj $WIN32_CRT_LIBS -EXTRACT:$WIN32_CRTDLL_FULLPATH if grep -q '__imp__\{0,1\}free' crtdll.obj; then - MOZ_GLUE_LDFLAGS='-LIBPATH:$(DIST)/lib -NODEFAULTLIB:msvcrt -NODEFAULTLIB:msvcrtd -NODEFAULTLIB:msvcprt -NODEFAULTLIB:msvcprtd -DEFAULTLIB:mozcrt' + MOZ_GLUE_LDFLAGS='-LIBPATH:$(DIST)/lib -NODEFAULTLIB:msvcrt -NODEFAULTLIB:msvcprt -DEFAULTLIB:mozcrt' dnl Also pass this to NSPR/NSS DLLFLAGS="$DLLFLAGS $MOZ_GLUE_LDFLAGS" else @@ -9215,6 +9213,9 @@ if test -z "$MOZ_NATIVE_NSPR"; then ac_configure_args="$ac_configure_args --disable-debug" else ac_configure_args="$ac_configure_args --enable-debug" + if test -n "$MOZ_NO_DEBUG_RTL"; then + ac_configure_args="$ac_configure_args --disable-debug-rtl" + fi fi if test "$MOZ_OPTIMIZE" = "1"; then ac_configure_args="$ac_configure_args --enable-optimize" diff --git a/content/base/public/nsViewportInfo.h b/content/base/public/nsViewportInfo.h index 1eae0099bc21..e7345de89c28 100644 --- a/content/base/public/nsViewportInfo.h +++ b/content/base/public/nsViewportInfo.h @@ -32,7 +32,7 @@ class MOZ_STACK_CLASS nsViewportInfo mAllowZoom(aAllowZoom), mAllowDoubleTapZoom(aAllowDoubleTapZoom) { - mSize = mozilla::gfx::RoundedToInt(mozilla::ScreenSize(aDisplaySize) / mDefaultZoom); + mSize = mozilla::ScreenSize(aDisplaySize) / mDefaultZoom; mozilla::CSSToLayoutDeviceScale pixelRatio(1.0f); mMinZoom = pixelRatio * kViewportMinScale; mMaxZoom = pixelRatio * kViewportMaxScale; @@ -42,7 +42,7 @@ class MOZ_STACK_CLASS nsViewportInfo nsViewportInfo(const mozilla::CSSToScreenScale& aDefaultZoom, const 
mozilla::CSSToScreenScale& aMinZoom, const mozilla::CSSToScreenScale& aMaxZoom, - const mozilla::CSSIntSize& aSize, + const mozilla::CSSSize& aSize, bool aAutoSize, bool aAllowZoom, bool aAllowDoubleTapZoom) : @@ -62,7 +62,7 @@ class MOZ_STACK_CLASS nsViewportInfo mozilla::CSSToScreenScale GetMinZoom() { return mMinZoom; } mozilla::CSSToScreenScale GetMaxZoom() { return mMaxZoom; } - mozilla::CSSIntSize GetSize() { return mSize; } + mozilla::CSSSize GetSize() { return mSize; } bool IsAutoSizeEnabled() { return mAutoSize; } bool IsZoomAllowed() { return mAllowZoom; } @@ -90,7 +90,7 @@ class MOZ_STACK_CLASS nsViewportInfo mozilla::CSSToScreenScale mMaxZoom; // The size of the viewport, specified by the tag. - mozilla::CSSIntSize mSize; + mozilla::CSSSize mSize; // Whether or not we should automatically size the viewport to the device's // width. This is true if the document has been optimized for mobile, and diff --git a/content/base/src/contentSecurityPolicy.js b/content/base/src/contentSecurityPolicy.js index ff0702580225..cb1ce4d11262 100644 --- a/content/base/src/contentSecurityPolicy.js +++ b/content/base/src/contentSecurityPolicy.js @@ -647,7 +647,7 @@ ContentSecurityPolicy.prototype = { } policy.log(WARN_FLAG, violationMessage, (aSourceFile) ? aSourceFile : null, - (aScriptSample) ? decodeURIComponent(aScriptSample) : null, + (aScriptSample) ? aScriptSample : null, (aLineNum) ? aLineNum : null); }, diff --git a/content/base/src/nsDocument.cpp b/content/base/src/nsDocument.cpp index bb7f9de68257..c692b3305b9a 100644 --- a/content/base/src/nsDocument.cpp +++ b/content/base/src/nsDocument.cpp @@ -7625,11 +7625,11 @@ nsDocument::GetViewportInfo(const ScreenIntSize& aDisplaySize) } case Specified: default: - CSSIntSize size = mViewportSize; + CSSSize size = mViewportSize; if (!mValidWidth) { if (mValidHeight && !aDisplaySize.IsEmpty()) { - size.width = int32_t(size.height * aDisplaySize.width / aDisplaySize.height); + size.width = size.height * aDisplaySize.width / aDisplaySize.height; } else { size.width = Preferences::GetInt("browser.viewport.desktopWidth", kViewportDefaultScreenWidth); @@ -7638,7 +7638,7 @@ nsDocument::GetViewportInfo(const ScreenIntSize& aDisplaySize) if (!mValidHeight) { if (!aDisplaySize.IsEmpty()) { - size.height = int32_t(size.width * aDisplaySize.height / aDisplaySize.width); + size.height = size.width * aDisplaySize.height / aDisplaySize.width; } else { size.height = size.width; } @@ -7654,28 +7654,28 @@ nsDocument::GetViewportInfo(const ScreenIntSize& aDisplaySize) if (mAutoSize) { // aDisplaySize is in screen pixels; convert them to CSS pixels for the viewport size. CSSToScreenScale defaultPixelScale = pixelRatio * LayoutDeviceToScreenScale(1.0f); - size = mozilla::gfx::RoundedToInt(ScreenSize(aDisplaySize) / defaultPixelScale); + size = ScreenSize(aDisplaySize) / defaultPixelScale; } - size.width = clamped(size.width, kViewportMinSize.width, kViewportMaxSize.width); + size.width = clamped(size.width, float(kViewportMinSize.width), float(kViewportMaxSize.width)); // Also recalculate the default zoom, if it wasn't specified in the metadata, // and the width is specified. if (mScaleStrEmpty && !mWidthStrEmpty) { - CSSToScreenScale defaultScale(float(aDisplaySize.width) / float(size.width)); + CSSToScreenScale defaultScale(float(aDisplaySize.width) / size.width); scaleFloat = (scaleFloat > defaultScale) ? 
scaleFloat : defaultScale;
   }
 
-  size.height = clamped(size.height, kViewportMinSize.height, kViewportMaxSize.height);
+  size.height = clamped(size.height, float(kViewportMinSize.height), float(kViewportMaxSize.height));
 
   // We need to perform a conversion, but only if the initial or maximum
   // scale were set explicitly by the user.
   if (mValidScaleFloat) {
-    CSSIntSize displaySize = RoundedToInt(ScreenSize(aDisplaySize) / scaleFloat);
+    CSSSize displaySize = ScreenSize(aDisplaySize) / scaleFloat;
     size.width = std::max(size.width, displaySize.width);
     size.height = std::max(size.height, displaySize.height);
   } else if (mValidMaxScale) {
-    CSSIntSize displaySize = RoundedToInt(ScreenSize(aDisplaySize) / scaleMaxFloat);
+    CSSSize displaySize = ScreenSize(aDisplaySize) / scaleMaxFloat;
     size.width = std::max(size.width, displaySize.width);
     size.height = std::max(size.height, displaySize.height);
   }
diff --git a/content/base/src/nsDocument.h b/content/base/src/nsDocument.h
index f061ca7c524a..d3b486b0f60a 100644
--- a/content/base/src/nsDocument.h
+++ b/content/base/src/nsDocument.h
@@ -1692,7 +1692,7 @@ private:
   mozilla::LayoutDeviceToScreenScale mScaleFloat;
   mozilla::CSSToLayoutDeviceScale mPixelRatio;
   bool mAutoSize, mAllowZoom, mAllowDoubleTapZoom, mValidScaleFloat, mValidMaxScale, mScaleStrEmpty, mWidthStrEmpty;
-  mozilla::CSSIntSize mViewportSize;
+  mozilla::CSSSize mViewportSize;
 
   nsrefcnt mStackRefCnt;
   bool mNeedsReleaseAfterStackRefCntRelease;
diff --git a/content/media/webrtc/MediaEngineWebRTC.cpp b/content/media/webrtc/MediaEngineWebRTC.cpp
index c8f2e06b043a..7be24c73879c 100644
--- a/content/media/webrtc/MediaEngineWebRTC.cpp
+++ b/content/media/webrtc/MediaEngineWebRTC.cpp
@@ -131,12 +131,10 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
-  jobject context = mozilla::AndroidBridge::Bridge()->GetGlobalContextRef();
-
   // get the JVM
   JavaVM *jvm = mozilla::AndroidBridge::Bridge()->GetVM();
 
-  if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
+  if (webrtc::VideoEngine::SetAndroidObjects(jvm) != 0) {
     LOG(("VieCapture:SetAndroidObjects Failed"));
     return;
   }
diff --git a/content/media/webrtc/MediaEngineWebRTC.h b/content/media/webrtc/MediaEngineWebRTC.h
index 755ae3ef37ed..7482e7e000f3 100644
--- a/content/media/webrtc/MediaEngineWebRTC.h
+++ b/content/media/webrtc/MediaEngineWebRTC.h
@@ -43,6 +43,8 @@
 #include "webrtc/voice_engine/include/voe_call_report.h"
 
 // Video Engine
+// conflicts with #include of scoped_ptr.h
+#undef FF
 #include "webrtc/video_engine/include/vie_base.h"
 #include "webrtc/video_engine/include/vie_codec.h"
 #include "webrtc/video_engine/include/vie_render.h"
diff --git a/docshell/base/nsDocShell.cpp b/docshell/base/nsDocShell.cpp
index aaf40c776526..5470f5cd1715 100644
--- a/docshell/base/nsDocShell.cpp
+++ b/docshell/base/nsDocShell.cpp
@@ -5565,11 +5565,19 @@ nsDocShell::GetVisibility(bool * aVisibility)
       return NS_OK;
     }
 
-    nsIContent *shellContent =
-      pPresShell->GetDocument()->FindContentForSubDocument(presShell->GetDocument());
-    NS_ASSERTION(shellContent, "subshell not in the map");
+    vm = presShell->GetViewManager();
+    if (vm) {
+      view = vm->GetRootView();
+    }
 
-    nsIFrame* frame = shellContent ? shellContent->GetPrimaryFrame() : nullptr;
+    if (view) {
+      view = view->GetParent(); // anonymous inner view
+      if (view) {
+        view = view->GetParent(); // subdocumentframe's view
+      }
+    }
+
+    nsIFrame* frame = view ? view->GetFrame() : nullptr;
     bool isDocShellOffScreen = false;
     docShell->GetIsOffScreenBrowser(&isDocShellOffScreen);
     if (frame &&
diff --git a/dom/base/Navigator.cpp b/dom/base/Navigator.cpp
index 393dd9f251be..f0b18c263e47 100644
--- a/dom/base/Navigator.cpp
+++ b/dom/base/Navigator.cpp
@@ -82,7 +82,7 @@
 #endif
 #include "nsIDOMGlobalPropertyInitializer.h"
-#include "nsIDataStoreService.h"
+#include "mozilla/dom/DataStoreService.h"
 #include "nsJSUtils.h"
 
 #include "nsScriptNameSpaceManager.h"
@@ -1466,8 +1466,7 @@ Navigator::GetDataStores(nsPIDOMWindow* aWindow,
     return nullptr;
   }
 
-  nsCOMPtr<nsIDataStoreService> service =
-    do_GetService("@mozilla.org/datastore-service;1");
+  nsRefPtr<DataStoreService> service = DataStoreService::GetOrCreate();
   if (!service) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
diff --git a/dom/base/URL.cpp b/dom/base/URL.cpp
index 32166267977d..cffd0ac1a04c 100644
--- a/dom/base/URL.cpp
+++ b/dom/base/URL.cpp
@@ -341,7 +341,18 @@ URL::UpdateURLSearchParams()
 
 void
 URL::GetHostname(nsString& aHostname) const
 {
-  URL_GETTER(aHostname, GetHost);
+  aHostname.Truncate();
+  nsAutoCString tmp;
+  nsresult rv = mURI->GetHost(tmp);
+  if (NS_SUCCEEDED(rv)) {
+    if (tmp.FindChar(':') != -1) { // Escape IPv6 address
+      MOZ_ASSERT(!tmp.Length() ||
+                 (tmp[0] != '[' && tmp[tmp.Length() - 1] != ']'));
+      tmp.Insert('[', 0);
+      tmp.Append(']');
+    }
+    CopyUTF8toUTF16(tmp, aHostname);
+  }
 }
 
 void
diff --git a/dom/base/nsDOMClassInfo.cpp b/dom/base/nsDOMClassInfo.cpp
index 70737f17f144..bfaad8b42a7b 100644
--- a/dom/base/nsDOMClassInfo.cpp
+++ b/dom/base/nsDOMClassInfo.cpp
@@ -49,7 +49,6 @@
 #include "nsIDOMEventListener.h"
 #include "nsContentUtils.h"
 #include "nsCxPusher.h"
-#include "nsIDOMWindowUtils.h"
 #include "nsIDOMGlobalPropertyInitializer.h"
 #include "nsLocation.h"
 #include "mozilla/Attributes.h"
@@ -344,9 +343,6 @@ static nsDOMClassInfoData sClassInfoData[] = {
   NS_DEFINE_CLASSINFO_DATA(CSSSupportsRule, nsDOMGenericSH,
                            DOM_DEFAULT_SCRIPTABLE_FLAGS)
 
-  NS_DEFINE_CLASSINFO_DATA(WindowUtils, nsDOMGenericSH,
-                           DOM_DEFAULT_SCRIPTABLE_FLAGS)
-
   NS_DEFINE_CLASSINFO_DATA(XSLTProcessor, nsDOMGenericSH,
                            DOM_DEFAULT_SCRIPTABLE_FLAGS)
 
@@ -854,10 +850,6 @@ nsDOMClassInfo::Init()
 #endif
   DOM_CLASSINFO_MAP_END
 
-  DOM_CLASSINFO_MAP_BEGIN(WindowUtils, nsIDOMWindowUtils)
-    DOM_CLASSINFO_MAP_ENTRY(nsIDOMWindowUtils)
-  DOM_CLASSINFO_MAP_END
-
   DOM_CLASSINFO_MAP_BEGIN(Location, nsIDOMLocation)
     DOM_CLASSINFO_MAP_ENTRY(nsIDOMLocation)
   DOM_CLASSINFO_MAP_END
diff --git a/dom/base/nsDOMClassInfoClasses.h b/dom/base/nsDOMClassInfoClasses.h
index e1ea872a5a9d..edcf9ec62d48 100644
--- a/dom/base/nsDOMClassInfoClasses.h
+++ b/dom/base/nsDOMClassInfoClasses.h
@@ -42,9 +42,6 @@ DOMCI_CLASS(TreeColumn)
 DOMCI_CLASS(CSSMozDocumentRule)
 DOMCI_CLASS(CSSSupportsRule)
 
-// WindowUtils
-DOMCI_CLASS(WindowUtils)
-
 // XSLTProcessor
 DOMCI_CLASS(XSLTProcessor)
 
diff --git a/dom/base/nsDOMWindowUtils.cpp b/dom/base/nsDOMWindowUtils.cpp
index d7912d83c8a2..f051fa5770b0 100644
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -103,13 +103,10 @@ class gfxContext;
 
 static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
 
-DOMCI_DATA(WindowUtils, nsDOMWindowUtils)
-
 NS_INTERFACE_MAP_BEGIN(nsDOMWindowUtils)
   NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, nsIDOMWindowUtils)
   NS_INTERFACE_MAP_ENTRY(nsIDOMWindowUtils)
   NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
-  NS_DOM_INTERFACE_MAP_ENTRY_CLASSINFO(WindowUtils)
NS_INTERFACE_MAP_END
 
 NS_IMPL_ADDREF(nsDOMWindowUtils)
@@ -318,8 +315,9 @@ nsDOMWindowUtils::GetViewportInfo(uint32_t aDisplayWidth,
   *aAllowZoom = 
info.IsZoomAllowed(); *aMinZoom = info.GetMinZoom().scale; *aMaxZoom = info.GetMaxZoom().scale; - *aWidth = info.GetSize().width; - *aHeight = info.GetSize().height; + CSSIntSize size = gfx::RoundedToInt(info.GetSize()); + *aWidth = size.width; + *aHeight = size.height; *aAutoSize = info.IsAutoSizeEnabled(); return NS_OK; } diff --git a/dom/base/nsJSEnvironment.cpp b/dom/base/nsJSEnvironment.cpp index 1183344f4f11..42c85250389b 100644 --- a/dom/base/nsJSEnvironment.cpp +++ b/dom/base/nsJSEnvironment.cpp @@ -1657,8 +1657,10 @@ nsJSContext::InitClasses(JS::Handle aGlobalObj) #endif #ifdef MOZ_DMD - // Attempt to initialize DMD functions - ::JS_DefineFunctions(cx, aGlobalObj, DMDFunctions); + if (nsContentUtils::IsCallerChrome()) { + // Attempt to initialize DMD functions + ::JS_DefineFunctions(cx, aGlobalObj, DMDFunctions); + } #endif #ifdef MOZ_JPROF diff --git a/dom/base/test/test_url.html b/dom/base/test/test_url.html index 3baa59d397e7..7b7e8ba4bd63 100644 --- a/dom/base/test/test_url.html +++ b/dom/base/test/test_url.html @@ -292,5 +292,32 @@ url.hostname = ""; is(url.href, "http://localhost/", "Empty hostname is ignored"); + + diff --git a/dom/datastore/DataStore.manifest b/dom/datastore/DataStore.manifest index 204682d116dd..409bcdc9586e 100644 --- a/dom/datastore/DataStore.manifest +++ b/dom/datastore/DataStore.manifest @@ -1,2 +1,2 @@ -component {d193d0e2-c677-4a7b-bb0a-19155b470f2e} DataStoreService.js -contract @mozilla.org/datastore-service;1 {d193d0e2-c677-4a7b-bb0a-19155b470f2e} +component {db5c9602-030f-4bff-a3de-881a8de370f2} DataStoreImpl.js +contract @mozilla.org/dom/datastore;1 {db5c9602-030f-4bff-a3de-881a8de370f2} diff --git a/dom/datastore/DataStoreCallbacks.h b/dom/datastore/DataStoreCallbacks.h new file mode 100644 index 000000000000..44ed587b1663 --- /dev/null +++ b/dom/datastore/DataStoreCallbacks.h @@ -0,0 +1,48 @@ +/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */ +/* vim: set ts=2 et sw=2 tw=80: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef mozilla_dom_DataStoreCallbacks_h +#define mozilla_dom_DataStoreCallbacks_h + +#include "nsISupports.h" + +namespace mozilla { +namespace dom { + +class DataStoreDB; + +class DataStoreDBCallback +{ +public: + NS_IMETHOD_(MozExternalRefCountType) AddRef(void) = 0; + NS_IMETHOD_(MozExternalRefCountType) Release(void) = 0; + + virtual void Run(DataStoreDB* aDb, bool aSuccess) = 0; + +protected: + virtual ~DataStoreDBCallback() + { + } +}; + +class DataStoreRevisionCallback +{ +public: + NS_IMETHOD_(MozExternalRefCountType) AddRef(void) = 0; + NS_IMETHOD_(MozExternalRefCountType) Release(void) = 0; + + virtual void Run(const nsAString& aRevisionID) = 0; + +protected: + virtual ~DataStoreRevisionCallback() + { + } +}; + +} // dom namespace +} // mozilla namespace + +#endif // mozilla_dom_DataStoreCallbacks_h diff --git a/dom/datastore/DataStoreChangeNotifier.jsm b/dom/datastore/DataStoreChangeNotifier.jsm index 0f9d20d6d93b..19eb287f2d1c 100644 --- a/dom/datastore/DataStoreChangeNotifier.jsm +++ b/dom/datastore/DataStoreChangeNotifier.jsm @@ -12,9 +12,6 @@ function debug(s) { //dump('DEBUG DataStoreChangeNotifier: ' + s + '\n'); } -// DataStoreServiceInternal should not be converted into a lazy getter as it -// runs code during initialization. 
-Cu.import('resource://gre/modules/DataStoreServiceInternal.jsm');
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 Cu.import("resource://gre/modules/Services.jsm");
diff --git a/dom/datastore/DataStoreDB.cpp b/dom/datastore/DataStoreDB.cpp
new file mode 100644
index 000000000000..ecb73926e3b0
--- /dev/null
+++ b/dom/datastore/DataStoreDB.cpp
@@ -0,0 +1,319 @@
+/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "DataStoreDB.h"
+
+#include "DataStoreCallbacks.h"
+#include "mozilla/dom/IDBDatabaseBinding.h"
+#include "mozilla/dom/IDBFactoryBinding.h"
+#include "mozilla/dom/indexedDB/IDBDatabase.h"
+#include "mozilla/dom/indexedDB/IDBFactory.h"
+#include "mozilla/dom/indexedDB/IDBIndex.h"
+#include "mozilla/dom/indexedDB/IDBObjectStore.h"
+#include "mozilla/dom/indexedDB/IDBRequest.h"
+#include "nsIDOMEvent.h"
+
+#define DATASTOREDB_VERSION 1
+#define DATASTOREDB_NAME "DataStoreDB"
+#define DATASTOREDB_REVISION_INDEX "revisionIndex"
+
+using namespace mozilla::dom::indexedDB;
+
+namespace mozilla {
+namespace dom {
+
+NS_IMPL_ISUPPORTS(DataStoreDB, nsIDOMEventListener)
+
+DataStoreDB::DataStoreDB(const nsAString& aManifestURL, const nsAString& aName)
+  : mState(Inactive)
+{
+  mDatabaseName.Assign(aName);
+  mDatabaseName.AppendASCII("|");
+  mDatabaseName.Append(aManifestURL);
+}
+
+DataStoreDB::~DataStoreDB()
+{
+}
+
+nsresult
+DataStoreDB::CreateFactoryIfNeeded()
+{
+  if (!mFactory) {
+    nsresult rv = IDBFactory::Create(nullptr, getter_AddRefs(mFactory));
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+  }
+
+  return NS_OK;
+}
+
+nsresult
+DataStoreDB::Open(IDBTransactionMode aMode, const Sequence<nsString>& aDbs,
+                  DataStoreDBCallback* aCallback)
+{
+  MOZ_ASSERT(mState == Inactive);
+
+  nsresult rv = CreateFactoryIfNeeded();
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  ErrorResult error;
+  mRequest = mFactory->Open(mDatabaseName, DATASTOREDB_VERSION, error);
+  if (NS_WARN_IF(error.Failed())) {
+    return error.ErrorCode();
+  }
+
+  rv = AddEventListeners();
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  mState = Active;
+  mTransactionMode = aMode;
+  mObjectStores = aDbs;
+  mCallback = aCallback;
+  return NS_OK;
+}
+
+NS_IMETHODIMP
+DataStoreDB::HandleEvent(nsIDOMEvent* aEvent)
+{
+  nsString type;
+  nsresult rv = aEvent->GetType(type);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (type.EqualsASCII("success")) {
+    RemoveEventListeners();
+    mState = Inactive;
+
+    rv = DatabaseOpened();
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      mCallback->Run(this, false);
+    } else {
+      mCallback->Run(this, true);
+    }
+
+    mRequest = nullptr;
+    return NS_OK;
+  }
+
+  if (type.EqualsASCII("upgradeneeded")) {
+    return UpgradeSchema();
+  }
+
+  if (type.EqualsASCII("error") || type.EqualsASCII("blocked")) {
+    RemoveEventListeners();
+    mState = Inactive;
+    mCallback->Run(this, false);
+    mRequest = nullptr;
+    return NS_OK;
+  }
+
+  MOZ_ASSUME_UNREACHABLE("This should not happen");
+  return NS_OK;
+}
+
+nsresult
+DataStoreDB::UpgradeSchema()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  AutoSafeJSContext cx;
+
+  ErrorResult error;
+  JS::Rooted<JS::Value> result(cx, mRequest->GetResult(error));
+  if (NS_WARN_IF(error.Failed())) {
+    return error.ErrorCode();
+  }
+
+  MOZ_ASSERT(result.isObject());
+
+  IDBDatabase* database = nullptr;
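+  // UNWRAP_OBJECT recovers the native IDBDatabase behind the JS object that
+  // the open request handed back.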
+  nsresult rv = UNWRAP_OBJECT(IDBDatabase, &result.toObject(), database);
+  if (NS_FAILED(rv)) {
+    NS_WARNING("Didn't get the object we expected!");
+    return rv;
+  }
+
+  {
+    RootedDictionary<IDBObjectStoreParameters> params(cx);
+    params.Init(NS_LITERAL_STRING("{ \"autoIncrement\": true }"));
+    nsRefPtr<IDBObjectStore> store =
+      database->CreateObjectStore(cx, NS_LITERAL_STRING(DATASTOREDB_NAME),
+                                  params, error);
+    if (NS_WARN_IF(error.Failed())) {
+      return error.ErrorCode();
+    }
+  }
+
+  nsRefPtr<IDBObjectStore> store;
+
+  {
+    RootedDictionary<IDBObjectStoreParameters> params(cx);
+    params.Init(NS_LITERAL_STRING("{ \"autoIncrement\": true, \"keyPath\": \"internalRevisionId\" }"));
+
+    store =
+      database->CreateObjectStore(cx, NS_LITERAL_STRING(DATASTOREDB_REVISION),
+                                  params, error);
+    if (NS_WARN_IF(error.Failed())) {
+      return error.ErrorCode();
+    }
+  }
+
+  {
+    RootedDictionary<IDBIndexParameters> params(cx);
+    params.Init(NS_LITERAL_STRING("{ \"unique\": true }"));
+    nsRefPtr<IDBIndex> index =
+      store->CreateIndex(cx, NS_LITERAL_STRING(DATASTOREDB_REVISION_INDEX),
+                         NS_LITERAL_STRING("revisionId"), params, error);
+    if (NS_WARN_IF(error.Failed())) {
+      return error.ErrorCode();
+    }
+  }
+
+  return NS_OK;
+}
+
+nsresult
+DataStoreDB::DatabaseOpened()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  AutoSafeJSContext cx;
+
+  ErrorResult error;
+  JS::Rooted<JS::Value> result(cx, mRequest->GetResult(error));
+  if (NS_WARN_IF(error.Failed())) {
+    return error.ErrorCode();
+  }
+
+  MOZ_ASSERT(result.isObject());
+
+  nsresult rv = UNWRAP_OBJECT(IDBDatabase, &result.toObject(), mDatabase);
+  if (NS_FAILED(rv)) {
+    NS_WARNING("Didn't get the object we expected!");
+    return rv;
+  }
+
+  nsRefPtr<IDBTransaction> txn = mDatabase->Transaction(mObjectStores,
+                                                        mTransactionMode,
+                                                        error);
+  if (NS_WARN_IF(error.Failed())) {
+    return error.ErrorCode();
+  }
+
+  mTransaction = txn.forget();
+  return NS_OK;
+}
+
+nsresult
+DataStoreDB::Delete()
+{
+  MOZ_ASSERT(mState == Inactive);
+
+  nsresult rv = CreateFactoryIfNeeded();
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  mTransaction = nullptr;
+
+  if (mDatabase) {
+    rv = mDatabase->Close();
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+
+    mDatabase = nullptr;
+  }
+
+  ErrorResult error;
+  nsRefPtr<IDBOpenDBRequest> request =
+    mFactory->DeleteDatabase(mDatabaseName, IDBOpenDBOptions(), error);
+  if (NS_WARN_IF(error.Failed())) {
+    return error.ErrorCode();
+  }
+
+  return NS_OK;
+}
+
+indexedDB::IDBTransaction*
+DataStoreDB::Transaction() const
+{
+  MOZ_ASSERT(mTransaction);
+  MOZ_ASSERT(mTransaction->IsOpen());
+  return mTransaction;
+}
+
+nsresult
+DataStoreDB::AddEventListeners()
+{
+  nsresult rv;
+  rv = mRequest->EventTarget::AddEventListener(NS_LITERAL_STRING("success"),
+                                               this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = mRequest->EventTarget::AddEventListener(NS_LITERAL_STRING("upgradeneeded"),
+                                               this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = mRequest->EventTarget::AddEventListener(NS_LITERAL_STRING("error"),
+                                               this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = mRequest->EventTarget::AddEventListener(NS_LITERAL_STRING("blocked"),
+                                               this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  return NS_OK;
+}
+
+nsresult
+DataStoreDB::RemoveEventListeners()
+{
+  nsresult rv;
+  rv = mRequest->RemoveEventListener(NS_LITERAL_STRING("success"),
+                                     this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = mRequest->RemoveEventListener(NS_LITERAL_STRING("upgradeneeded"),
+                                     this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = mRequest->RemoveEventListener(NS_LITERAL_STRING("error"),
+                                     this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = mRequest->RemoveEventListener(NS_LITERAL_STRING("blocked"),
+                                     this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  return NS_OK;
+}
+
+} // namespace dom
+} // namespace mozilla
diff --git a/dom/datastore/DataStoreDB.h b/dom/datastore/DataStoreDB.h
new file mode 100644
index 000000000000..9a782b7d0799
--- /dev/null
+++ b/dom/datastore/DataStoreDB.h
@@ -0,0 +1,82 @@
+/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_dom_DataStoreDB_h
+#define mozilla_dom_DataStoreDB_h
+
+#include "mozilla/dom/IDBTransactionBinding.h"
+#include "nsAutoPtr.h"
+#include "nsIDOMEventListener.h"
+#include "nsISupportsImpl.h"
+#include "nsString.h"
+
+#define DATASTOREDB_REVISION "revision"
+
+namespace mozilla {
+namespace dom {
+
+namespace indexedDB {
+class IDBDatabase;
+class IDBFactory;
+class IDBObjectStore;
+class IDBOpenDBRequest;
+class IDBTransaction;
+}
+
+class DataStoreDBCallback;
+
+class DataStoreDB MOZ_FINAL : public nsIDOMEventListener
+{
+public:
+  NS_DECL_ISUPPORTS
+
+  DataStoreDB(const nsAString& aManifestURL, const nsAString& aName);
+  ~DataStoreDB();
+
+  nsresult Open(IDBTransactionMode aMode, const Sequence<nsString>& aDb,
+                DataStoreDBCallback* aCallback);
+
+  nsresult Delete();
+
+  indexedDB::IDBTransaction* Transaction() const;
+
+  // nsIDOMEventListener
+  NS_IMETHOD HandleEvent(nsIDOMEvent* aEvent);
+
+private:
+  nsresult CreateFactoryIfNeeded();
+
+  nsresult UpgradeSchema();
+
+  nsresult DatabaseOpened();
+
+  nsresult AddEventListeners();
+
+  nsresult RemoveEventListeners();
+
+  nsString mDatabaseName;
+
+  nsRefPtr<indexedDB::IDBFactory> mFactory;
+  nsRefPtr<indexedDB::IDBOpenDBRequest> mRequest;
+  nsRefPtr<indexedDB::IDBDatabase> mDatabase;
+  nsRefPtr<indexedDB::IDBTransaction> mTransaction;
+
+  nsRefPtr<DataStoreDBCallback> mCallback;
+
+  // Internal state to prevent misuse of this class.
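+  // Open() asserts the Inactive state and flips it to Active; HandleEvent()
+  // resets it once the open request succeeds, fails or gets blocked.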
+  enum StateType {
+    Inactive,
+    Active
+  } mState;
+
+  IDBTransactionMode mTransactionMode;
+  Sequence<nsString> mObjectStores;
+};
+
+} // namespace dom
+} // namespace mozilla
+
+#endif // mozilla_dom_DataStoreDB_h
diff --git a/dom/datastore/DataStoreImpl.jsm b/dom/datastore/DataStoreImpl.js
similarity index 97%
rename from dom/datastore/DataStoreImpl.jsm
rename to dom/datastore/DataStoreImpl.js
index c8825e5f5e61..21285ccda499 100644
--- a/dom/datastore/DataStoreImpl.jsm
+++ b/dom/datastore/DataStoreImpl.js
@@ -6,8 +6,6 @@
 
 'use strict'
 
-this.EXPORTED_SYMBOLS = ["DataStore"];
-
 function debug(s) {
   //dump('DEBUG DataStore: ' + s + '\n');
 }
@@ -59,17 +57,16 @@ function validateId(aId) {
 }
 
 /* DataStore object */
-this.DataStore = function(aWindow, aName, aOwner, aReadOnly) {
+function DataStore() {
   debug("DataStore created");
-  this.init(aWindow, aName, aOwner, aReadOnly);
 }
 
-this.DataStore.prototype = {
+DataStore.prototype = {
+  classDescription: "DataStore XPCOM Component",
+  classID: Components.ID("{db5c9602-030f-4bff-a3de-881a8de370f2}"),
+  contractID: "@mozilla.org/dom/datastore-impl;1",
-  QueryInterface: XPCOMUtils.generateQI([Components.interfaces.nsISupports,
-                                         Components.interfaces.nsIObserver]),
+  QueryInterface: XPCOMUtils.generateQI([Ci.nsIDataStore, Ci.nsISupports,
+                                         Ci.nsIObserver]),
 
   callbacks: [],
@@ -536,3 +533,5 @@
     return exposedCursor;
   }
 };
+
+this.NSGetFactory = XPCOMUtils.generateNSGetFactory([DataStore]);
diff --git a/dom/datastore/DataStoreRevision.cpp b/dom/datastore/DataStoreRevision.cpp
new file mode 100644
index 000000000000..daa4f99cf177
--- /dev/null
+++ b/dom/datastore/DataStoreRevision.cpp
@@ -0,0 +1,102 @@
+/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "DataStoreRevision.h"
+
+#include "DataStoreCallbacks.h"
+#include "DataStoreService.h"
+#include "mozilla/dom/DataStoreBinding.h"
+#include "mozilla/dom/indexedDB/IDBObjectStore.h"
+#include "nsIDOMEvent.h"
+
+namespace mozilla {
+namespace dom {
+
+using namespace indexedDB;
+
+NS_IMPL_ISUPPORTS(DataStoreRevision, nsIDOMEventListener)
+
+// Note: the code in this method must not assume anything about the
+// compartment that aCx is in.
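+// AddRevision stores a { revisionId, objectId, operation } record through the
+// object store's Put() and runs aCallback with the new revision id on success.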
+nsresult
+DataStoreRevision::AddRevision(JSContext* aCx,
+                               IDBObjectStore* aStore,
+                               uint32_t aObjectId,
+                               RevisionType aRevisionType,
+                               DataStoreRevisionCallback* aCallback)
+{
+  MOZ_ASSERT(aStore);
+  MOZ_ASSERT(aCallback);
+
+  nsRefPtr<DataStoreService> service = DataStoreService::Get();
+  if (!service) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsString id;
+  nsresult rv = service->GenerateUUID(mRevisionID);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  DataStoreRevisionData data;
+  data.mRevisionId = mRevisionID;
+  data.mObjectId = aObjectId;
+
+  switch (aRevisionType) {
+    case RevisionVoid:
+      data.mOperation = NS_LITERAL_STRING("void");
+      break;
+
+    default:
+      MOZ_ASSUME_UNREACHABLE("This should not happen");
+      break;
+  }
+
+  JS::Rooted<JS::Value> value(aCx);
+  if (!data.ToObject(aCx, &value)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  ErrorResult error;
+  mRequest = aStore->Put(aCx, value, JS::UndefinedHandleValue, error);
+  if (NS_WARN_IF(error.Failed())) {
+    return error.ErrorCode();
+  }
+
+  rv = mRequest->EventTarget::AddEventListener(NS_LITERAL_STRING("success"),
+                                               this, false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  mCallback = aCallback;
+  return NS_OK;
+}
+
+NS_IMETHODIMP
+DataStoreRevision::HandleEvent(nsIDOMEvent* aEvent)
+{
+  nsString type;
+  nsresult rv = aEvent->GetType(type);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (!type.EqualsASCII("success")) {
+    MOZ_ASSUME_UNREACHABLE("This should not happen");
+    return NS_ERROR_FAILURE;
+  }
+
+  mRequest->RemoveEventListener(NS_LITERAL_STRING("success"), this, false);
+  mRequest = nullptr;
+
+  mCallback->Run(mRevisionID);
+  return NS_OK;
+}
+
+} // dom namespace
+} // mozilla namespace
diff --git a/dom/datastore/DataStoreRevision.h b/dom/datastore/DataStoreRevision.h
new file mode 100644
index 000000000000..7162ae949545
--- /dev/null
+++ b/dom/datastore/DataStoreRevision.h
@@ -0,0 +1,52 @@
+/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_dom_DataStoreRevision_h
+#define mozilla_dom_DataStoreRevision_h
+
+#include "jsapi.h"
+#include "nsAutoPtr.h"
+#include "nsIDOMEventListener.h"
+#include "nsString.h"
+
+namespace mozilla {
+namespace dom {
+
+namespace indexedDB {
+class IDBObjectStore;
+class IDBRequest;
+}
+
+class DataStoreRevisionCallback;
+
+class DataStoreRevision MOZ_FINAL : public nsIDOMEventListener
+{
+public:
+  NS_DECL_ISUPPORTS
+
+  enum RevisionType {
+    RevisionVoid
+  };
+
+  nsresult AddRevision(JSContext* aCx,
+                       indexedDB::IDBObjectStore* aStore,
+                       uint32_t aObjectId,
+                       RevisionType aRevisionType,
+                       DataStoreRevisionCallback* aCallback);
+
+  // nsIDOMEventListener
+  NS_IMETHOD HandleEvent(nsIDOMEvent* aEvent);
+
+private:
+  nsRefPtr<DataStoreRevisionCallback> mCallback;
+  nsRefPtr<indexedDB::IDBRequest> mRequest;
+  nsString mRevisionID;
+};
+
+} // namespace dom
+} // namespace mozilla
+
+#endif // mozilla_dom_DataStoreRevision_h
diff --git a/dom/datastore/DataStoreService.cpp b/dom/datastore/DataStoreService.cpp
new file mode 100644
index 000000000000..b26e8b47af99
--- /dev/null
+++ b/dom/datastore/DataStoreService.cpp
@@ -0,0 +1,1347 @@
+/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0.
+ * If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "DataStoreService.h"
+
+#include "DataStoreCallbacks.h"
+#include "DataStoreDB.h"
+#include "DataStoreRevision.h"
+#include "mozilla/dom/DataStore.h"
+#include "mozilla/dom/DataStoreBinding.h"
+#include "mozilla/dom/DataStoreImplBinding.h"
+#include "nsIDataStore.h"
+
+#include "mozilla/Preferences.h"
+#include "mozilla/Services.h"
+#include "mozilla/StaticPtr.h"
+#include "mozilla/dom/ContentChild.h"
+#include "mozilla/dom/ContentParent.h"
+#include "mozilla/dom/DOMError.h"
+#include "mozilla/dom/indexedDB/IDBCursor.h"
+#include "mozilla/dom/indexedDB/IDBObjectStore.h"
+#include "mozilla/dom/PermissionMessageUtils.h"
+#include "mozilla/dom/Promise.h"
+#include "mozilla/unused.h"
+
+#include "mozIApplication.h"
+#include "mozIApplicationClearPrivateDataParams.h"
+#include "nsIAppsService.h"
+#include "nsIDOMEvent.h"
+#include "nsIDocument.h"
+#include "nsIDOMGlobalPropertyInitializer.h"
+#include "nsIIOService.h"
+#include "nsIObserverService.h"
+#include "nsIPermissionManager.h"
+#include "nsIScriptSecurityManager.h"
+#include "nsIUUIDGenerator.h"
+#include "nsPIDOMWindow.h"
+#include "nsIURI.h"
+
+#include "nsContentUtils.h"
+#include "nsNetCID.h"
+#include "nsServiceManagerUtils.h"
+#include "nsThreadUtils.h"
+#include "nsXULAppAPI.h"
+
+#define ASSERT_PARENT_PROCESS()                 \
+  AssertIsInMainProcess();                      \
+  if (NS_WARN_IF(!IsMainProcess())) {           \
+    return NS_ERROR_FAILURE;                    \
+  }
+
+namespace mozilla {
+namespace dom {
+
+using namespace indexedDB;
+
+// This class contains all the information about a DataStore.
+class DataStoreInfo
+{
+public:
+  DataStoreInfo()
+    : mReadOnly(true)
+    , mEnabled(false)
+  {}
+
+  DataStoreInfo(const nsAString& aName,
+                const nsAString& aOriginURL,
+                const nsAString& aManifestURL,
+                bool aReadOnly,
+                bool aEnabled)
+  {
+    Init(aName, aOriginURL, aManifestURL, aReadOnly, aEnabled);
+  }
+
+  void Init(const nsAString& aName,
+            const nsAString& aOriginURL,
+            const nsAString& aManifestURL,
+            bool aReadOnly,
+            bool aEnabled)
+  {
+    mName = aName;
+    mOriginURL = aOriginURL;
+    mManifestURL = aManifestURL;
+    mReadOnly = aReadOnly;
+    mEnabled = aEnabled;
+  }
+
+  void Update(const nsAString& aName,
+              const nsAString& aOriginURL,
+              const nsAString& aManifestURL,
+              bool aReadOnly)
+  {
+    mName = aName;
+    mOriginURL = aOriginURL;
+    mManifestURL = aManifestURL;
+    mReadOnly = aReadOnly;
+  }
+
+  void Enable()
+  {
+    mEnabled = true;
+  }
+
+  nsString mName;
+  nsString mOriginURL;
+  nsString mManifestURL;
+  bool mReadOnly;
+
+  // A DataStore is enabled when it has its first revision.
+  bool mEnabled;
+};
+
+namespace {
+
+// Singleton for DataStoreService.
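+// Created lazily by GetOrCreate() and dropped again in Shutdown().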
+StaticRefPtr<DataStoreService> gDataStoreService;
+static uint64_t gCounterID = 0;
+
+typedef nsClassHashtable<nsUint32HashKey, DataStoreInfo> HashApp;
+
+bool
+IsMainProcess()
+{
+  static const bool isMainProcess =
+    XRE_GetProcessType() == GeckoProcessType_Default;
+  return isMainProcess;
+}
+
+void
+AssertIsInMainProcess()
+{
+  MOZ_ASSERT(IsMainProcess());
+}
+
+void
+RejectPromise(nsPIDOMWindow* aWindow, Promise* aPromise, nsresult aRv)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(NS_FAILED(aRv));
+
+  nsRefPtr<DOMError> error;
+  if (aRv == NS_ERROR_DOM_SECURITY_ERR) {
+    error = new DOMError(aWindow, NS_LITERAL_STRING("SecurityError"),
+                         NS_LITERAL_STRING("Access denied"));
+  } else {
+    error = new DOMError(aWindow, NS_LITERAL_STRING("InternalError"),
+                         NS_LITERAL_STRING("An error occurred"));
+  }
+
+  aPromise->MaybeReject(error);
+}
+
+void
+DeleteDatabase(const nsAString& aName,
+               const nsAString& aManifestURL)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsRefPtr<DataStoreDB> db = new DataStoreDB(aManifestURL, aName);
+  db->Delete();
+}
+
+PLDHashOperator
+DeleteDataStoresAppEnumerator(const uint32_t& aAppId,
+                              nsAutoPtr<DataStoreInfo>& aInfo,
+                              void* aUserData)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  auto* appId = static_cast<uint32_t*>(aUserData);
+  if (*appId != aAppId) {
+    return PL_DHASH_NEXT;
+  }
+
+  DeleteDatabase(aInfo->mName, aInfo->mManifestURL);
+  return PL_DHASH_REMOVE;
+}
+
+PLDHashOperator
+DeleteDataStoresEnumerator(const nsAString& aName,
+                           nsAutoPtr<HashApp>& aApps,
+                           void* aUserData)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  aApps->Enumerate(DeleteDataStoresAppEnumerator, aUserData);
+  return aApps->Count() ? PL_DHASH_NEXT : PL_DHASH_REMOVE;
+}
+
+void
+GeneratePermissionName(nsAString& aPermission,
+                       const nsAString& aName,
+                       const nsAString& aManifestURL)
+{
+  aPermission.AssignASCII("indexedDB-chrome-");
+  aPermission.Append(aName);
+  aPermission.AppendASCII("|");
+  aPermission.Append(aManifestURL);
+}
+
+nsresult
+ResetPermission(uint32_t aAppId, const nsAString& aOriginURL,
+                const nsAString& aManifestURL,
+                const nsAString& aPermission,
+                bool aReadOnly)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsresult rv;
+  nsCOMPtr<nsIIOService> ioService(do_GetService(NS_IOSERVICE_CONTRACTID, &rv));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  nsCOMPtr<nsIURI> uri;
+  rv = ioService->NewURI(NS_ConvertUTF16toUTF8(aOriginURL), nullptr, nullptr,
+                         getter_AddRefs(uri));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  nsIScriptSecurityManager* ssm = nsContentUtils::GetSecurityManager();
+  if (!ssm) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsCOMPtr<nsIPrincipal> principal;
+  rv = ssm->GetAppCodebasePrincipal(uri, aAppId, false,
+                                    getter_AddRefs(principal));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  nsCOMPtr<nsIPermissionManager> pm =
+    do_GetService(NS_PERMISSIONMANAGER_CONTRACTID);
+  if (!pm) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsCString basePermission;
+  basePermission.Append(NS_ConvertUTF16toUTF8(aPermission));
+
+  // Write permission
+  {
+    nsCString permission;
+    permission.Append(basePermission);
+    permission.AppendASCII("-write");
+
+    uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;
+    rv = pm->TestExactPermissionFromPrincipal(principal, permission.get(),
+                                              &perm);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+
+    if (aReadOnly && perm == nsIPermissionManager::ALLOW_ACTION) {
+      rv = pm->RemoveFromPrincipal(principal, permission.get());
+    }
+    else if (!aReadOnly && perm != nsIPermissionManager::ALLOW_ACTION) {
+      rv = pm->AddFromPrincipal(principal, permission.get(),
+                                nsIPermissionManager::ALLOW_ACTION,
+                                nsIPermissionManager::EXPIRE_NEVER, 0);
+    }
+
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+  }
+
+  // Read permission
+  {
+    nsCString permission;
+    permission.Append(basePermission);
+    permission.AppendASCII("-read");
+
+    uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;
+    rv = pm->TestExactPermissionFromPrincipal(principal, permission.get(),
+                                              &perm);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+
+    if (perm != nsIPermissionManager::ALLOW_ACTION) {
+      rv = pm->AddFromPrincipal(principal, permission.get(),
+                                nsIPermissionManager::ALLOW_ACTION,
+                                nsIPermissionManager::EXPIRE_NEVER, 0);
+      if (NS_WARN_IF(NS_FAILED(rv))) {
+        return rv;
+      }
+    }
+  }
+
+  // Generic permission
+  uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;
+  rv = pm->TestExactPermissionFromPrincipal(principal, basePermission.get(),
+                                            &perm);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (perm != nsIPermissionManager::ALLOW_ACTION) {
+    rv = pm->AddFromPrincipal(principal, basePermission.get(),
+                              nsIPermissionManager::ALLOW_ACTION,
+                              nsIPermissionManager::EXPIRE_NEVER, 0);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+  }
+
+  return NS_OK;
+}
+
+class MOZ_STACK_CLASS GetDataStoreInfosData
+{
+public:
+  GetDataStoreInfosData(nsClassHashtable<nsStringHashKey, HashApp>& aAccessStores,
+                        const nsAString& aName, uint32_t aAppId,
+                        nsTArray<DataStoreInfo>& aStores)
+    : mAccessStores(aAccessStores)
+    , mName(aName)
+    , mAppId(aAppId)
+    , mStores(aStores)
+  {}
+
+  nsClassHashtable<nsStringHashKey, HashApp>& mAccessStores;
+  nsString mName;
+  uint32_t mAppId;
+  nsTArray<DataStoreInfo>& mStores;
+};
+
+PLDHashOperator
+GetDataStoreInfosEnumerator(const uint32_t& aAppId,
+                            DataStoreInfo* aInfo,
+                            void* aUserData)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  auto* data = static_cast<GetDataStoreInfosData*>(aUserData);
+  if (aAppId == data->mAppId) {
+    return PL_DHASH_NEXT;
+  }
+
+  HashApp* apps;
+  if (!data->mAccessStores.Get(data->mName, &apps)) {
+    return PL_DHASH_NEXT;
+  }
+
+  DataStoreInfo* accessInfo = nullptr;
+  if (!apps->Get(data->mAppId, &accessInfo)) {
+    return PL_DHASH_NEXT;
+  }
+
+  bool readOnly = aInfo->mReadOnly || accessInfo->mReadOnly;
+  DataStoreInfo* accessStore = data->mStores.AppendElement();
+  accessStore->Init(aInfo->mName, aInfo->mOriginURL,
+                    aInfo->mManifestURL, readOnly,
+                    aInfo->mEnabled);
+
+  return PL_DHASH_NEXT;
+}
+
+// This class is useful to enumerate the add permissions for each app.
+class MOZ_STACK_CLASS AddPermissionsData
+{
+public:
+  AddPermissionsData(const nsAString& aPermission, bool aReadOnly)
+    : mPermission(aPermission)
+    , mReadOnly(aReadOnly)
+    , mResult(NS_OK)
+  {}
+
+  nsString mPermission;
+  bool mReadOnly;
+  nsresult mResult;
+};
+
+PLDHashOperator
+AddPermissionsEnumerator(const uint32_t& aAppId,
+                         DataStoreInfo* aInfo,
+                         void* userData)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  auto* data = static_cast<AddPermissionsData*>(userData);
+
+  // ReadOnly is decided by the owner first.
+  bool readOnly = data->mReadOnly || aInfo->mReadOnly;
+
+  data->mResult = ResetPermission(aAppId, aInfo->mOriginURL,
+                                  aInfo->mManifestURL,
+                                  data->mPermission,
+                                  readOnly);
+  return NS_FAILED(data->mResult) ? PL_DHASH_STOP : PL_DHASH_NEXT;
+}
+
+// This class is useful to enumerate the access permissions for each app.
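+// Unlike AddPermissionsData, the permission name is rebuilt per app from the
+// store name and that app's manifest URL (see GeneratePermissionName below).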
+class MOZ_STACK_CLASS AddAccessPermissionsData
+{
+public:
+  AddAccessPermissionsData(const nsAString& aName, bool aReadOnly)
+    : mName(aName)
+    , mReadOnly(aReadOnly)
+    , mResult(NS_OK)
+  {}
+
+  nsString mName;
+  bool mReadOnly;
+  nsresult mResult;
+};
+
+PLDHashOperator
+AddAccessPermissionsEnumerator(const uint32_t& aAppId,
+                               DataStoreInfo* aInfo,
+                               void* userData)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  auto* data = static_cast<AddAccessPermissionsData*>(userData);
+
+  nsString permission;
+  GeneratePermissionName(permission, data->mName, aInfo->mManifestURL);
+
+  // ReadOnly is decided by the owner first.
+  bool readOnly = aInfo->mReadOnly || data->mReadOnly;
+
+  data->mResult = ResetPermission(aAppId, aInfo->mOriginURL,
+                                  aInfo->mManifestURL,
+                                  permission, readOnly);
+  return NS_FAILED(data->mResult) ? PL_DHASH_STOP : PL_DHASH_NEXT;
+}
+
+} /* anonymous namespace */
+
+// A PendingRequest is created when content code wants a list of DataStores
+// but some of them are not enabled yet.
+class PendingRequest
+{
+public:
+  void Init(nsPIDOMWindow* aWindow, Promise* aPromise,
+            const nsTArray<DataStoreInfo>& aStores,
+            const nsTArray<nsString>& aPendingDataStores)
+  {
+    mWindow = aWindow;
+    mPromise = aPromise;
+    mStores = aStores;
+    mPendingDataStores = aPendingDataStores;
+  }
+
+  nsCOMPtr<nsPIDOMWindow> mWindow;
+  nsRefPtr<Promise> mPromise;
+  nsTArray<DataStoreInfo> mStores;
+
+  // This array contains the list of manifestURLs of the DataStores that are
+  // not enabled yet.
+  nsTArray<nsString> mPendingDataStores;
+};
+
+// This callback is used to enable a DataStore when its first revisionID is
+// created.
+class RevisionAddedEnableStoreCallback MOZ_FINAL :
+  public DataStoreRevisionCallback
+{
+public:
+  NS_INLINE_DECL_REFCOUNTING(RevisionAddedEnableStoreCallback);
+
+  RevisionAddedEnableStoreCallback(uint32_t aAppId,
+                                   const nsAString& aName,
+                                   const nsAString& aManifestURL)
+    : mAppId(aAppId)
+    , mName(aName)
+    , mManifestURL(aManifestURL)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+  }
+
+  void
+  Run(const nsAString& aRevisionId)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+
+    nsRefPtr<DataStoreService> service = DataStoreService::Get();
+    MOZ_ASSERT(service);
+
+    service->EnableDataStore(mAppId, mName, mManifestURL);
+  }
+
+private:
+  uint32_t mAppId;
+  nsString mName;
+  nsString mManifestURL;
+};
+
+// This DataStoreDBCallback is called when DataStoreDB opens the DataStore DB.
+// Then the first revision will be created if it doesn't exist yet.
+class FirstRevisionIdCallback MOZ_FINAL : public DataStoreDBCallback
+                                        , public nsIDOMEventListener
+{
+public:
+  NS_DECL_ISUPPORTS
+
+  FirstRevisionIdCallback(uint32_t aAppId, const nsAString& aName,
+                          const nsAString& aManifestURL)
+    : mAppId(aAppId)
+    , mName(aName)
+    , mManifestURL(aManifestURL)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+  }
+
+  void
+  Run(DataStoreDB* aDb, bool aSuccess)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+    MOZ_ASSERT(aDb);
+
+    if (!aSuccess) {
+      NS_WARNING("Failed to create the first revision.");
+      return;
+    }
+
+    mTxn = aDb->Transaction();
+
+    ErrorResult rv;
+    nsRefPtr<IDBObjectStore> store =
+      mTxn->ObjectStore(NS_LITERAL_STRING(DATASTOREDB_REVISION), rv);
+    if (NS_WARN_IF(rv.Failed())) {
+      return;
+    }
+
+    // A null JSContext is ok because OpenCursor ignores it if the range is
+    // undefined.
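+    // 'Prev' iterates the revisions backwards, so the first result (if any)
+    // is the newest revision.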
+// This DataStoreDBCallback is called when DataStoreDB opens the DataStore DB.
+// Then the first revision will be created if it doesn't exist yet.
+class FirstRevisionIdCallback MOZ_FINAL : public DataStoreDBCallback
+                                        , public nsIDOMEventListener
+{
+public:
+  NS_DECL_ISUPPORTS
+
+  FirstRevisionIdCallback(uint32_t aAppId, const nsAString& aName,
+                          const nsAString& aManifestURL)
+    : mAppId(aAppId)
+    , mName(aName)
+    , mManifestURL(aManifestURL)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+  }
+
+  void
+  Run(DataStoreDB* aDb, bool aSuccess)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+    MOZ_ASSERT(aDb);
+
+    if (!aSuccess) {
+      NS_WARNING("Failed to create the first revision.");
+      return;
+    }
+
+    mTxn = aDb->Transaction();
+
+    ErrorResult rv;
+    nsRefPtr<IDBObjectStore> store =
+      mTxn->ObjectStore(NS_LITERAL_STRING(DATASTOREDB_REVISION), rv);
+    if (NS_WARN_IF(rv.Failed())) {
+      return;
+    }
+
+    // A null JSContext is OK because OpenCursor ignores it if the range is
+    // undefined.
+    mRequest = store->OpenCursor(nullptr, JS::UndefinedHandleValue,
+                                 IDBCursorDirection::Prev, rv);
+    if (NS_WARN_IF(rv.Failed())) {
+      return;
+    }
+
+    nsresult res;
+    res = mRequest->EventTarget::AddEventListener(NS_LITERAL_STRING("success"),
+                                                  this, false);
+    if (NS_WARN_IF(NS_FAILED(res))) {
+      return;
+    }
+  }
+
+  // nsIDOMEventListener
+  NS_IMETHOD
+  HandleEvent(nsIDOMEvent* aEvent)
+  {
+    AssertIsInMainProcess();
+    MOZ_ASSERT(NS_IsMainThread());
+
+    nsString type;
+    nsresult rv = aEvent->GetType(type);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+
+    if (!type.EqualsASCII("success")) {
+      return NS_ERROR_FAILURE;
+    }
+
+    mRequest->RemoveEventListener(NS_LITERAL_STRING("success"), this, false);
+
+    // Note: this cx is only used for rooting and AddRevision, neither of which
+    // actually cares which compartment we're in.
+    AutoSafeJSContext cx;
+
+    ErrorResult error;
+    JS::Rooted<JS::Value> result(cx, mRequest->GetResult(error));
+    if (NS_WARN_IF(error.Failed())) {
+      return error.ErrorCode();
+    }
+
+    // If the result is an object, it is an IDBCursor, so the first revision
+    // already exists.
+    if (result.isObject()) {
+      nsRefPtr<DataStoreService> service = DataStoreService::Get();
+      MOZ_ASSERT(service);
+
+      return service->EnableDataStore(mAppId, mName, mManifestURL);
+    }
+
+    MOZ_ASSERT(mTxn);
+    nsRefPtr<IDBObjectStore> store =
+      mTxn->ObjectStore(NS_LITERAL_STRING(DATASTOREDB_REVISION), error);
+    if (NS_WARN_IF(error.Failed())) {
+      return error.ErrorCode();
+    }
+    MOZ_ASSERT(store);
+
+    nsRefPtr<RevisionAddedEnableStoreCallback> callback =
+      new RevisionAddedEnableStoreCallback(mAppId, mName, mManifestURL);
+
+    // If the revision doesn't exist, let's create it. Assign the member
+    // (rather than a shadowing local) so the revision stays alive until the
+    // callback runs.
+    mRevision = new DataStoreRevision();
+    return mRevision->AddRevision(cx, store, 0, DataStoreRevision::RevisionVoid,
+                                  callback);
+  }
+
+private:
+  nsRefPtr<IDBRequest> mRequest;
+
+  nsRefPtr<IDBTransaction> mTxn;
+  nsRefPtr<DataStoreRevision> mRevision;
+
+  uint32_t mAppId;
+  nsString mName;
+  nsString mManifestURL;
+};
+
+NS_IMPL_ISUPPORTS(FirstRevisionIdCallback, nsIDOMEventListener)
+// This class calls the 'retrieveRevisionId' method of the DataStore object for
+// every DataStore in the 'mResults' array. When all of them have been called,
+// the promise is resolved with 'mResults'.
+// The reason this has to be done is that DataStore objects can be created on
+// any thread and in any process. The first revision has been created, but they
+// don't know its value yet.
+class RetrieveRevisionsCounter
+{
+public:
+  NS_INLINE_DECL_REFCOUNTING(RetrieveRevisionsCounter);
+
+  RetrieveRevisionsCounter(uint32_t aId, Promise* aPromise, uint32_t aCount)
+    : mPromise(aPromise)
+    , mId(aId)
+    , mCount(aCount)
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+  }
+
+  void
+  AppendDataStore(JSContext* aCx, DataStore* aDataStore,
+                  nsIDataStore* aDataStoreIf)
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+
+    mResults.AppendElement(aDataStore);
+
+    // DataStore will run this callback when the revisionID is retrieved.
+    JSFunction* func = js::NewFunctionWithReserved(aCx, JSCallback,
+                                                   0 /* nargs */, 0 /* flags */,
+                                                   nullptr, nullptr);
+    if (!func) {
+      return;
+    }
+
+    JS::Rooted<JSObject*> obj(aCx, JS_GetFunctionObject(func));
+    if (!obj) {
+      return;
+    }
+
+    // We use the ID to know which counter this is. The service keeps all of
+    // these counters alive, keyed by their IDs, in a hashtable.
+    js::SetFunctionNativeReserved(obj, 0, JS::Int32Value(mId));
+
+    JS::Rooted<JS::Value> value(aCx, JS::ObjectValue(*obj));
+    nsresult rv = aDataStoreIf->RetrieveRevisionId(value);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return;
+    }
+  }
+
+private:
+  static bool
+  JSCallback(JSContext* aCx, unsigned aArgc, JS::Value* aVp)
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+
+    JS::CallArgs args = CallArgsFromVp(aArgc, aVp);
+
+    JS::Rooted<JS::Value> value(aCx,
+      js::GetFunctionNativeReserved(&args.callee(), 0));
+    uint32_t id = value.toInt32();
+
+    nsRefPtr<DataStoreService> service = DataStoreService::Get();
+    MOZ_ASSERT(service);
+
+    nsRefPtr<RetrieveRevisionsCounter> counter = service->GetCounter(id);
+    MOZ_ASSERT(counter);
+
+    // When all the callbacks have been called, we can resolve the promise and
+    // remove the counter from the service.
+    --counter->mCount;
+    if (!counter->mCount) {
+      service->RemoveCounter(id);
+      counter->mPromise->MaybeResolve(counter->mResults);
+    }
+
+    return true;
+  }
+
+  nsRefPtr<Promise> mPromise;
+  nsTArray<nsRefPtr<DataStore>> mResults;
+
+  uint32_t mId;
+  uint32_t mCount;
+};
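// Editor's sketch, not part of the patch: the reserved-slot technique used by
// RetrieveRevisionsCounter, in isolation. A native function created with
// js::NewFunctionWithReserved() can stash a value via
// js::SetFunctionNativeReserved() and read it back inside the native, which
// avoids allocating a separate closure object. Names are illustrative.
static bool
EchoStashedId(JSContext* aCx, unsigned aArgc, JS::Value* aVp)
{
  JS::CallArgs args = JS::CallArgsFromVp(aArgc, aVp);
  // Recover the value stashed in reserved slot 0 of the callee.
  args.rval().set(js::GetFunctionNativeReserved(&args.callee(), 0));
  return true;
}

static JSObject*
NewIdCarryingFunction(JSContext* aCx, int32_t aId)
{
  JSFunction* func = js::NewFunctionWithReserved(aCx, EchoStashedId,
                                                 0 /* nargs */, 0 /* flags */,
                                                 nullptr, nullptr);
  if (!func) {
    return nullptr;
  }

  JS::Rooted<JSObject*> obj(aCx, JS_GetFunctionObject(func));
  js::SetFunctionNativeReserved(obj, 0, JS::Int32Value(aId));
  return obj;
}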
+/* static */ already_AddRefed<DataStoreService>
+DataStoreService::GetOrCreate()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (!gDataStoreService) {
+    nsRefPtr<DataStoreService> service = new DataStoreService();
+    if (NS_WARN_IF(NS_FAILED(service->Init()))) {
+      return nullptr;
+    }
+
+    gDataStoreService = service;
+  }
+
+  nsRefPtr<DataStoreService> service = gDataStoreService.get();
+  return service.forget();
+}
+
+/* static */ already_AddRefed<DataStoreService>
+DataStoreService::Get()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsRefPtr<DataStoreService> service = gDataStoreService.get();
+  return service.forget();
+}
+
+/* static */ void
+DataStoreService::Shutdown()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (gDataStoreService) {
+    if (IsMainProcess()) {
+      nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
+      if (obs) {
+        obs->RemoveObserver(gDataStoreService, "webapps-clear-data");
+      }
+    }
+
+    gDataStoreService = nullptr;
+  }
+}
+
+NS_INTERFACE_MAP_BEGIN(DataStoreService)
+  NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, nsIDataStoreService)
+  NS_INTERFACE_MAP_ENTRY(nsIDataStoreService)
+  NS_INTERFACE_MAP_ENTRY(nsIObserver)
+NS_INTERFACE_MAP_END
+
+NS_IMPL_ADDREF(DataStoreService)
+NS_IMPL_RELEASE(DataStoreService)
+
+DataStoreService::DataStoreService()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+}
+
+DataStoreService::~DataStoreService()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+}
+
+nsresult
+DataStoreService::Init()
+{
+  if (!IsMainProcess()) {
+    return NS_OK;
+  }
+
+  nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
+  if (!obs) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsresult rv = obs->AddObserver(this, "webapps-clear-data", false);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  return NS_OK;
+}
+
+NS_IMETHODIMP
+DataStoreService::InstallDataStore(uint32_t aAppId,
+                                   const nsAString& aName,
+                                   const nsAString& aOriginURL,
+                                   const nsAString& aManifestURL,
+                                   bool aReadOnly)
+{
+  ASSERT_PARENT_PROCESS()
+  MOZ_ASSERT(NS_IsMainThread());
+
+  HashApp* apps = nullptr;
+  if (!mStores.Get(aName, &apps)) {
+    apps = new HashApp();
+    mStores.Put(aName, apps);
+  }
+
+  DataStoreInfo* info = nullptr;
+  if (!apps->Get(aAppId, &info)) {
+    info = new DataStoreInfo(aName, aOriginURL, aManifestURL, aReadOnly, false);
+    apps->Put(aAppId, info);
+  } else {
+    info->Update(aName, aOriginURL, aManifestURL, aReadOnly);
+  }
+
+  nsresult rv = AddPermissions(aAppId, aName, aOriginURL, aManifestURL,
+                               aReadOnly);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Immediately create the first revision.
+  return CreateFirstRevisionId(aAppId, aName, aManifestURL);
+}
+
+NS_IMETHODIMP
+DataStoreService::InstallAccessDataStore(uint32_t aAppId,
+                                         const nsAString& aName,
+                                         const nsAString& aOriginURL,
+                                         const nsAString& aManifestURL,
+                                         bool aReadOnly)
+{
+  ASSERT_PARENT_PROCESS()
+  MOZ_ASSERT(NS_IsMainThread());
+
+  HashApp* apps = nullptr;
+  if (!mAccessStores.Get(aName, &apps)) {
+    apps = new HashApp();
+    mAccessStores.Put(aName, apps);
+  }
+
+  DataStoreInfo* info = nullptr;
+  if (!apps->Get(aAppId, &info)) {
+    info = new DataStoreInfo(aName, aOriginURL, aManifestURL, aReadOnly, false);
+    apps->Put(aAppId, info);
+  } else {
+    info->Update(aName, aOriginURL, aManifestURL, aReadOnly);
+  }
+
+  return AddAccessPermissions(aAppId, aName, aOriginURL, aManifestURL,
+                              aReadOnly);
+}
+NS_IMETHODIMP
+DataStoreService::GetDataStores(nsIDOMWindow* aWindow,
+                                const nsAString& aName,
+                                nsISupports** aDataStores)
+{
+  // FIXME This will be a thread-safe method.
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aWindow);
+  if (!window) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(window);
+  nsRefPtr<Promise> promise = new Promise(global);
+
+  nsCOMPtr<nsIDocument> document = window->GetDoc();
+  MOZ_ASSERT(document);
+
+  nsCOMPtr<nsIPrincipal> principal = document->NodePrincipal();
+  MOZ_ASSERT(principal);
+
+  nsTArray<DataStoreInfo> stores;
+
+  // If this request comes from the main process, we have access to the
+  // window, so we can skip the IPC communication.
+  if (IsMainProcess()) {
+    uint32_t appId;
+    nsresult rv = principal->GetAppId(&appId);
+    if (NS_FAILED(rv)) {
+      RejectPromise(window, promise, rv);
+      promise.forget(aDataStores);
+      return NS_OK;
+    }
+
+    rv = GetDataStoreInfos(aName, appId, stores);
+    if (NS_FAILED(rv)) {
+      RejectPromise(window, promise, rv);
+      promise.forget(aDataStores);
+      return NS_OK;
+    }
+  }
+
+  else {
+    // This method can be called in the child, so we need to send a request
+    // to the parent and create the DataStore objects here.
+    ContentChild* contentChild = ContentChild::GetSingleton();
+
+    nsTArray<DataStoreSetting> array;
+    if (!contentChild->SendDataStoreGetStores(nsAutoString(aName),
+                                              IPC::Principal(principal),
+                                              &array)) {
+      RejectPromise(window, promise, NS_ERROR_FAILURE);
+      promise.forget(aDataStores);
+      return NS_OK;
+    }
+
+    for (uint32_t i = 0; i < array.Length(); ++i) {
+      DataStoreInfo* info = stores.AppendElement();
+      info->Init(array[i].name(), array[i].originURL(),
+                 array[i].manifestURL(), array[i].readOnly(),
+                 array[i].enabled());
+    }
+  }
+
+  GetDataStoresCreate(window, promise, stores);
+  promise.forget(aDataStores);
+  return NS_OK;
+}
+
+void
+DataStoreService::GetDataStoresCreate(nsPIDOMWindow* aWindow, Promise* aPromise,
+                                      const nsTArray<DataStoreInfo>& aStores)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (!aStores.Length()) {
+    GetDataStoresResolve(aWindow, aPromise, aStores);
+    return;
+  }
+
+  nsTArray<nsString> pendingDataStores;
+  for (uint32_t i = 0; i < aStores.Length(); ++i) {
+    if (!aStores[i].mEnabled) {
+      pendingDataStores.AppendElement(aStores[i].mManifestURL);
+    }
+  }
+
+  if (!pendingDataStores.Length()) {
+    GetDataStoresResolve(aWindow, aPromise, aStores);
+    return;
+  }
+
+  PendingRequests* requests;
+  if (!mPendingRequests.Get(aStores[0].mName, &requests)) {
+    requests = new PendingRequests();
+    mPendingRequests.Put(aStores[0].mName, requests);
+  }
+
+  PendingRequest* request = requests->AppendElement();
+  request->Init(aWindow, aPromise, aStores, pendingDataStores);
+}
+
+void
+DataStoreService::GetDataStoresResolve(nsPIDOMWindow* aWindow,
+                                       Promise* aPromise,
+                                       const nsTArray<DataStoreInfo>& aStores)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (!aStores.Length()) {
+    nsTArray<nsRefPtr<DataStore>> results;
+    aPromise->MaybeResolve(results);
+    return;
+  }
+
+  AutoSafeJSContext cx;
+
+  // The counter will finish this task once all the DataStores know their
+  // first revision IDs.
+  nsRefPtr<RetrieveRevisionsCounter> counter =
+    new RetrieveRevisionsCounter(++gCounterID, aPromise, aStores.Length());
+  mPendingCounters.Put(gCounterID, counter);
+
+  for (uint32_t i = 0; i < aStores.Length(); ++i) {
+    nsCOMPtr<nsIDataStore> dataStore =
+      do_CreateInstance("@mozilla.org/dom/datastore;1");
+    if (NS_WARN_IF(!dataStore)) {
+      return;
+    }
+
+    nsresult rv = dataStore->Init(aWindow, aStores[i].mName,
+                                  aStores[i].mManifestURL,
+                                  aStores[i].mReadOnly);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return;
+    }
+
+    nsCOMPtr<nsIXPConnectWrappedJS> xpcwrappedjs = do_QueryInterface(dataStore);
+    if (NS_WARN_IF(!xpcwrappedjs)) {
+      return;
+    }
+
+    JS::Rooted<JSObject*> dataStoreJS(cx, xpcwrappedjs->GetJSObject());
+    if (NS_WARN_IF(!dataStoreJS)) {
+      return;
+    }
+
+    JSAutoCompartment ac(cx, dataStoreJS);
+    nsRefPtr<DataStoreImpl> dataStoreObj = new DataStoreImpl(dataStoreJS,
+                                                             aWindow);
+
+    nsRefPtr<DataStore> exposedStore = new DataStore(aWindow);
+
+    ErrorResult error;
+    exposedStore->SetDataStoreImpl(*dataStoreObj, error);
+    if (error.Failed()) {
+      return;
+    }
+
+    JS::Rooted<JSObject*> obj(cx, exposedStore->WrapObject(cx));
+    MOZ_ASSERT(obj);
+
+    JS::Rooted<JS::Value> exposedObject(cx, JS::ObjectValue(*obj));
+    dataStore->SetExposedObject(exposedObject);
+
+    counter->AppendDataStore(cx, exposedStore, dataStore);
+  }
+}
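// Editor's note, not part of the patch: GetDataStoresResolve() relies on
// RetrieveRevisionsCounter to resolve the promise only after every DataStore
// has reported its revision ID. The underlying pattern, reduced to its core
// with illustrative names:
class CompletionCounterSketch
{
public:
  CompletionCounterSketch(Promise* aPromise, uint32_t aOutstanding)
    : mPromise(aPromise)
    , mOutstanding(aOutstanding)
  {}

  void OneCompleted()
  {
    MOZ_ASSERT(mOutstanding);
    if (!--mOutstanding) {
      // Last outstanding callback: hand the final result to the promise.
      mPromise->MaybeResolve(true);
    }
  }

private:
  nsRefPtr<Promise> mPromise;
  uint32_t mOutstanding;
};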
+// This method populates 'aStores' with the list of DataStores named 'aName'
+// that are available to this 'aAppId'.
+nsresult
+DataStoreService::GetDataStoreInfos(const nsAString& aName,
+                                    uint32_t aAppId,
+                                    nsTArray<DataStoreInfo>& aStores)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsCOMPtr<nsIAppsService> appsService =
+    do_GetService("@mozilla.org/AppsService;1");
+  if (NS_WARN_IF(!appsService)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsCOMPtr<mozIApplication> app;
+  nsresult rv = appsService->GetAppByLocalId(aAppId, getter_AddRefs(app));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (!app) {
+    return NS_ERROR_DOM_SECURITY_ERR;
+  }
+
+  uint16_t status;
+  rv = app->GetAppStatus(&status);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (status != nsIPrincipal::APP_STATUS_CERTIFIED &&
+      !Preferences::GetBool("dom.testing.datastore_enabled_for_hosted_apps",
+                            false)) {
+    return NS_ERROR_DOM_SECURITY_ERR;
+  }
+
+  aStores.Clear();
+
+  HashApp* apps = nullptr;
+  if (!mStores.Get(aName, &apps)) {
+    return NS_OK;
+  }
+
+  DataStoreInfo* info = nullptr;
+  if (apps->Get(aAppId, &info)) {
+    DataStoreInfo* owned = aStores.AppendElement();
+    owned->Init(info->mName, info->mOriginURL, info->mManifestURL, false,
+                info->mEnabled);
+  }
+
+  GetDataStoreInfosData data(mAccessStores, aName, aAppId, aStores);
+  apps->EnumerateRead(GetDataStoreInfosEnumerator, &data);
+  return NS_OK;
+}
+
+// This method is called when an app with DataStores is deleted.
+void
+DataStoreService::DeleteDataStores(uint32_t aAppId)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  mStores.Enumerate(DeleteDataStoresEnumerator, &aAppId);
+  mAccessStores.Enumerate(DeleteDataStoresEnumerator, &aAppId);
+}
+
+NS_IMETHODIMP
+DataStoreService::Observe(nsISupports* aSubject,
+                          const char* aTopic,
+                          const char16_t* aData)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (strcmp(aTopic, "webapps-clear-data")) {
+    return NS_OK;
+  }
+
+  nsCOMPtr<mozIApplicationClearPrivateDataParams> params =
+    do_QueryInterface(aSubject);
+  MOZ_ASSERT(params);
+
+  // DataStore is exposed to apps, not browser content.
+  bool browserOnly;
+  nsresult rv = params->GetBrowserOnly(&browserOnly);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (browserOnly) {
+    return NS_OK;
+  }
+
+  uint32_t appId;
+  rv = params->GetAppId(&appId);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  DeleteDataStores(appId);
+
+  return NS_OK;
+}
+
+nsresult
+DataStoreService::AddPermissions(uint32_t aAppId,
+                                 const nsAString& aName,
+                                 const nsAString& aOriginURL,
+                                 const nsAString& aManifestURL,
+                                 bool aReadOnly)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  // This is the permission name.
+  nsString permission;
+  GeneratePermissionName(permission, aName, aManifestURL);
+
+  // When a new DataStore is installed, the permissions must be set for the
+  // owner app.
+  nsresult rv = ResetPermission(aAppId, aOriginURL, aManifestURL, permission,
+                                aReadOnly);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // For any app that wants to have access to this DataStore we add the
+  // permissions.
+  HashApp* apps;
+  if (!mAccessStores.Get(aName, &apps)) {
+    return NS_OK;
+  }
+
+  AddPermissionsData data(permission, aReadOnly);
+  apps->EnumerateRead(AddPermissionsEnumerator, &data);
+  return data.mResult;
+}
+
+nsresult
+DataStoreService::AddAccessPermissions(uint32_t aAppId, const nsAString& aName,
+                                       const nsAString& aOriginURL,
+                                       const nsAString& aManifestURL,
+                                       bool aReadOnly)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  // When an app wants to have access to a DataStore, the permissions must be
+  // set.
+  HashApp* apps = nullptr;
+  if (!mStores.Get(aName, &apps)) {
+    return NS_OK;
+  }
+
+  AddAccessPermissionsData data(aName, aReadOnly);
+  apps->EnumerateRead(AddAccessPermissionsEnumerator, &data);
+  return data.mResult;
+}
+
+// This method starts the operation that creates the first revision for a
+// DataStore, if needed.
+nsresult
+DataStoreService::CreateFirstRevisionId(uint32_t aAppId,
+                                        const nsAString& aName,
+                                        const nsAString& aManifestURL)
+{
+  AssertIsInMainProcess();
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsRefPtr<DataStoreDB> db = new DataStoreDB(aManifestURL, aName);
+
+  nsRefPtr<FirstRevisionIdCallback> callback =
+    new FirstRevisionIdCallback(aAppId, aName, aManifestURL);
+
+  Sequence<nsString> dbs;
+  dbs.AppendElement(NS_LITERAL_STRING(DATASTOREDB_REVISION));
+
+  return db->Open(IDBTransactionMode::Readwrite, dbs, callback);
+}
+
+nsresult
+DataStoreService::EnableDataStore(uint32_t aAppId, const nsAString& aName,
+                                  const nsAString& aManifestURL)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  {
+    HashApp* apps = nullptr;
+    DataStoreInfo* info = nullptr;
+    if (mStores.Get(aName, &apps) && apps->Get(aAppId, &info)) {
+      info->Enable();
+    }
+  }
+
+  // Notify the child processes.
+  if (IsMainProcess()) {
+    nsTArray<ContentParent*> children;
+    ContentParent::GetAll(children);
+    for (uint32_t i = 0; i < children.Length(); i++) {
+      if (children[i]->NeedsDataStoreInfos()) {
+        unused << children[i]->SendDataStoreNotify(aAppId, nsAutoString(aName),
+                                                   nsAutoString(aManifestURL));
+      }
+    }
+  }
+
+  // Maybe some pending requests are waiting for this DataStore.
+  PendingRequests* requests;
+  if (!mPendingRequests.Get(aName, &requests)) {
+    return NS_OK;
+  }
+
+  for (uint32_t i = 0; i < requests->Length();) {
+    PendingRequest& request = requests->ElementAt(i);
+    nsTArray<nsString>::index_type pos =
+      request.mPendingDataStores.IndexOf(aManifestURL);
+    if (pos != request.mPendingDataStores.NoIndex) {
+      request.mPendingDataStores.RemoveElementAt(pos);
+
+      // No other pending DataStores.
+      if (request.mPendingDataStores.IsEmpty()) {
+        GetDataStoresResolve(request.mWindow, request.mPromise,
+                             request.mStores);
+        requests->RemoveElementAt(i);
+        continue;
+      }
+    }
+
+    ++i;
+  }
+
+  // No other pending requests for this name.
+  if (requests->IsEmpty()) {
+    mPendingRequests.Remove(aName);
+  }
+
+  return NS_OK;
+}
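// Editor's note, not part of the patch: the pending-request loop in
// EnableDataStore() above uses the standard idiom for removing elements from
// an nsTArray while iterating: the index is advanced only when the current
// element is kept, because removal slides the next element down into the
// current slot. Schematically, with illustrative types:
static void
RemoveMatching(nsTArray<nsString>& aPending, const nsAString& aDone)
{
  for (uint32_t i = 0; i < aPending.Length();) {
    if (aPending[i].Equals(aDone)) {
      aPending.RemoveElementAt(i);
      continue;  // the former element i + 1 is now at index i
    }
    ++i;
  }
}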
+already_AddRefed<RetrieveRevisionsCounter>
+DataStoreService::GetCounter(uint32_t aId) const
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsRefPtr<RetrieveRevisionsCounter> counter;
+  return mPendingCounters.Get(aId, getter_AddRefs(counter))
+           ? counter.forget() : nullptr;
+}
+
+void
+DataStoreService::RemoveCounter(uint32_t aId)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  mPendingCounters.Remove(aId);
+}
+
+nsresult
+DataStoreService::GetDataStoresFromIPC(const nsAString& aName,
+                                       nsIPrincipal* aPrincipal,
+                                       nsTArray<DataStoreSetting>* aValue)
+{
+  MOZ_ASSERT(IsMainProcess());
+  MOZ_ASSERT(NS_IsMainThread());
+
+  uint32_t appId;
+  nsresult rv = aPrincipal->GetAppId(&appId);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  nsTArray<DataStoreInfo> stores;
+  rv = GetDataStoreInfos(aName, appId, stores);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  for (uint32_t i = 0; i < stores.Length(); ++i) {
+    DataStoreSetting* data = aValue->AppendElement();
+    data->name() = stores[i].mName;
+    data->originURL() = stores[i].mOriginURL;
+    data->manifestURL() = stores[i].mManifestURL;
+    data->readOnly() = stores[i].mReadOnly;
+    data->enabled() = stores[i].mEnabled;
+  }
+
+  return NS_OK;
+}
+
+nsresult
+DataStoreService::GenerateUUID(nsAString& aID)
+{
+  nsresult rv;
+
+  if (!mUUIDGenerator) {
+    mUUIDGenerator = do_GetService("@mozilla.org/uuid-generator;1", &rv);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+  }
+
+  nsID id;
+  rv = mUUIDGenerator->GenerateUUIDInPlace(&id);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  char chars[NSID_LENGTH];
+  id.ToProvidedString(chars);
+  CopyASCIItoUTF16(chars, aID);
+
+  return NS_OK;
+}
+
+} // namespace dom
+} // namespace mozilla
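// Editor's sketch, not part of the patch: how a parent-process caller might
// drive the service. The app id, name, and URLs are made up;
// InstallDataStore() and GetDataStoresFromIPC() are the entry points defined
// above and declared in the header below.
static nsresult
InstallExampleStore(nsIPrincipal* aPrincipal)
{
  nsRefPtr<DataStoreService> service = DataStoreService::GetOrCreate();
  if (NS_WARN_IF(!service)) {
    return NS_ERROR_FAILURE;
  }

  nsresult rv = service->InstallDataStore(
    1007 /* appId */,
    NS_LITERAL_STRING("contacts"),
    NS_LITERAL_STRING("app://contacts.example.org"),
    NS_LITERAL_STRING("app://contacts.example.org/manifest.webapp"),
    false /* readOnly */);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return rv;
  }

  // What a child process would receive for this principal over IPC.
  nsTArray<DataStoreSetting> settings;
  return service->GetDataStoresFromIPC(NS_LITERAL_STRING("contacts"),
                                       aPrincipal, &settings);
}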
diff --git a/dom/datastore/DataStoreService.h b/dom/datastore/DataStoreService.h
new file mode 100644
index 000000000000..73210ff1d7b4
--- /dev/null
+++ b/dom/datastore/DataStoreService.h
@@ -0,0 +1,110 @@
+/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_dom_DataStoreService_h
+#define mozilla_dom_DataStoreService_h
+
+#include "mozilla/dom/PContent.h"
+#include "nsClassHashtable.h"
+#include "nsIDataStoreService.h"
+#include "nsIObserver.h"
+#include "nsRefPtrHashtable.h"
+
+class nsIPrincipal;
+class nsIUUIDGenerator;
+class nsPIDOMWindow;
+
+namespace mozilla {
+namespace dom {
+
+class DataStoreInfo;
+class FirstRevisionIdCallback;
+class PendingRequest;
+class Promise;
+class RetrieveRevisionsCounter;
+class RevisionAddedEnableStoreCallback;
+
+class DataStoreService MOZ_FINAL : public nsIDataStoreService
+                                 , public nsIObserver
+{
+  friend class ContentChild;
+  friend class FirstRevisionIdCallback;
+  friend class RetrieveRevisionsCounter;
+  friend class RevisionAddedEnableStoreCallback;
+
+public:
+  NS_DECL_ISUPPORTS
+  NS_DECL_NSIOBSERVER
+  NS_DECL_NSIDATASTORESERVICE
+
+  // Returns the DataStoreService singleton. Only to be called from main
+  // thread.
+  static already_AddRefed<DataStoreService> GetOrCreate();
+
+  static already_AddRefed<DataStoreService> Get();
+
+  static void Shutdown();
+
+  nsresult GenerateUUID(nsAString& aID);
+
+  nsresult GetDataStoresFromIPC(const nsAString& aName,
+                                nsIPrincipal* aPrincipal,
+                                nsTArray<DataStoreSetting>* aValue);
+
+private:
+  DataStoreService();
+  ~DataStoreService();
+
+  nsresult Init();
+
+  typedef nsClassHashtable<nsUint32HashKey, DataStoreInfo> HashApp;
+
+  nsresult AddPermissions(uint32_t aAppId, const nsAString& aName,
+                          const nsAString& aOriginURL,
+                          const nsAString& aManifestURL,
+                          bool aReadOnly);
+
+  nsresult AddAccessPermissions(uint32_t aAppId, const nsAString& aName,
+                                const nsAString& aOriginURL,
+                                const nsAString& aManifestURL,
+                                bool aReadOnly);
+
+  nsresult CreateFirstRevisionId(uint32_t aAppId, const nsAString& aName,
+                                 const nsAString& aManifestURL);
+
+  void GetDataStoresCreate(nsPIDOMWindow* aWindow, Promise* aPromise,
+                           const nsTArray<DataStoreInfo>& aStores);
+
+  void GetDataStoresResolve(nsPIDOMWindow* aWindow, Promise* aPromise,
+                            const nsTArray<DataStoreInfo>& aStores);
+
+  nsresult GetDataStoreInfos(const nsAString& aName, uint32_t aAppId,
+                             nsTArray<DataStoreInfo>& aStores);
+
+  void DeleteDataStores(uint32_t aAppId);
+
+  nsresult EnableDataStore(uint32_t aAppId, const nsAString& aName,
+                           const nsAString& aManifestURL);
+
+  already_AddRefed<RetrieveRevisionsCounter> GetCounter(uint32_t aId) const;
+
+  void RemoveCounter(uint32_t aId);
+
+  nsClassHashtable<nsStringHashKey, HashApp> mStores;
+  nsClassHashtable<nsStringHashKey, HashApp> mAccessStores;
+
+  typedef nsTArray<PendingRequest> PendingRequests;
+  nsClassHashtable<nsStringHashKey, PendingRequests> mPendingRequests;
+
+  nsRefPtrHashtable<nsUint32HashKey, RetrieveRevisionsCounter> mPendingCounters;
+
+  nsCOMPtr<nsIUUIDGenerator> mUUIDGenerator;
+};
+
+} // namespace dom
+} // namespace mozilla
+
+#endif // mozilla_dom_DataStoreService_h
diff --git a/dom/datastore/DataStoreService.js b/dom/datastore/DataStoreService.js
deleted file mode 100644
index fd8faafca758..000000000000
--- a/dom/datastore/DataStoreService.js
+++ /dev/null
@@ -1,522 +0,0 @@
-/* -*- Mode: js2; js2-basic-offset: 2; indent-tabs-mode: nil; -*- */
-/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/ - -'use strict' - -/* static functions */ - -function debug(s) { - //dump('DEBUG DataStoreService: ' + s + '\n'); -} - -const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; - -Cu.import('resource://gre/modules/XPCOMUtils.jsm'); -Cu.import('resource://gre/modules/Services.jsm'); -Cu.import('resource://gre/modules/DataStoreImpl.jsm'); -Cu.import("resource://gre/modules/DataStoreDB.jsm"); -Cu.import("resource://gre/modules/DOMRequestHelper.jsm"); - -XPCOMUtils.defineLazyServiceGetter(this, "cpmm", - "@mozilla.org/childprocessmessagemanager;1", - "nsIMessageSender"); - -XPCOMUtils.defineLazyServiceGetter(this, "ppmm", - "@mozilla.org/parentprocessmessagemanager;1", - "nsIMessageBroadcaster"); - -XPCOMUtils.defineLazyServiceGetter(this, "permissionManager", - "@mozilla.org/permissionmanager;1", - "nsIPermissionManager"); - -XPCOMUtils.defineLazyServiceGetter(this, "secMan", - "@mozilla.org/scriptsecuritymanager;1", - "nsIScriptSecurityManager"); - -/* DataStoreService */ - -const DATASTORESERVICE_CID = Components.ID('{d193d0e2-c677-4a7b-bb0a-19155b470f2e}'); -const REVISION_VOID = "void"; - -function DataStoreService() { - debug('DataStoreService Constructor'); - - this.inParent = Cc["@mozilla.org/xre/app-info;1"].getService(Ci.nsIXULRuntime) - .processType == Ci.nsIXULRuntime.PROCESS_TYPE_DEFAULT; - - if (this.inParent) { - let obs = Services.obs; - if (!obs) { - debug("DataStore Error: observer-service is null!"); - return; - } - - obs.addObserver(this, 'webapps-clear-data', false); - } - - let self = this; - cpmm.addMessageListener("datastore-first-revision-created", - function(aMsg) { self.receiveMessage(aMsg); }); -} - -DataStoreService.prototype = { - inParent: false, - - // Hash of DataStores - stores: {}, - accessStores: {}, - pendingRequests: {}, - - installDataStore: function(aAppId, aName, aOrigin, aOwner, aReadOnly) { - debug('installDataStore - appId: ' + aAppId + ', aName: ' + - aName + ', aOrigin: ' + aOrigin + ', aOwner:' + aOwner + - ', aReadOnly: ' + aReadOnly); - - this.checkIfInParent(); - - if (aName in this.stores && aAppId in this.stores[aName]) { - debug('This should not happen'); - return; - } - - if (!(aName in this.stores)) { - this.stores[aName] = {}; - } - - // A DataStore is enabled when it has a first valid revision. 
- this.stores[aName][aAppId] = { origin: aOrigin, owner: aOwner, - readOnly: aReadOnly, enabled: false }; - - this.addPermissions(aAppId, aName, aOrigin, aOwner, aReadOnly); - - this.createFirstRevisionId(aAppId, aName, aOwner); - }, - - installAccessDataStore: function(aAppId, aName, aOrigin, aOwner, aReadOnly) { - debug('installAccessDataStore - appId: ' + aAppId + ', aName: ' + - aName + ', aOrigin: ' + aOrigin + ', aOwner:' + aOwner + - ', aReadOnly: ' + aReadOnly); - - this.checkIfInParent(); - - if (aName in this.accessStores && aAppId in this.accessStores[aName]) { - debug('This should not happen'); - return; - } - - if (!(aName in this.accessStores)) { - this.accessStores[aName] = {}; - } - - this.accessStores[aName][aAppId] = { origin: aOrigin, owner: aOwner, - readOnly: aReadOnly }; - this.addAccessPermissions(aAppId, aName, aOrigin, aOwner, aReadOnly); - }, - - checkIfInParent: function() { - if (!this.inParent) { - throw "DataStore can execute this operation just in the parent process"; - } - }, - - createFirstRevisionId: function(aAppId, aName, aOwner) { - debug("createFirstRevisionId database: " + aName); - - let self = this; - let db = new DataStoreDB(); - db.init(aOwner, aName); - db.revisionTxn( - 'readwrite', - function(aTxn, aRevisionStore) { - debug("createFirstRevisionId - transaction success"); - - let request = aRevisionStore.openCursor(null, 'prev'); - request.onsuccess = function(aEvent) { - let cursor = aEvent.target.result; - if (cursor) { - debug("First revision already created."); - self.enableDataStore(aAppId, aName, aOwner); - } else { - // If the revision doesn't exist, let's create the first one. - db.addRevision(aRevisionStore, 0, REVISION_VOID, function() { - debug("First revision created."); - self.enableDataStore(aAppId, aName, aOwner); - }); - } - }; - } - ); - }, - - enableDataStore: function(aAppId, aName, aOwner) { - if (aName in this.stores && aAppId in this.stores[aName]) { - this.stores[aName][aAppId].enabled = true; - ppmm.broadcastAsyncMessage('datastore-first-revision-created', - { name: aName, owner: aOwner }); - } - }, - - addPermissions: function(aAppId, aName, aOrigin, aOwner, aReadOnly) { - // When a new DataStore is installed, the permissions must be set for the - // owner app. - let permission = "indexedDB-chrome-" + aName + '|' + aOwner; - this.resetPermissions(aAppId, aOrigin, aOwner, permission, aReadOnly); - - // For any app that wants to have access to this DataStore we add the - // permissions. - if (aName in this.accessStores) { - for (let appId in this.accessStores[aName]) { - // ReadOnly is decided by the owner first. - let readOnly = aReadOnly || this.accessStores[aName][appId].readOnly; - this.resetPermissions(appId, this.accessStores[aName][appId].origin, - this.accessStores[aName][appId].owner, - permission, readOnly); - } - } - }, - - addAccessPermissions: function(aAppId, aName, aOrigin, aOwner, aReadOnly) { - // When an app wants to have access to a DataStore, the permissions must be - // set. - if (!(aName in this.stores)) { - return; - } - - for (let appId in this.stores[aName]) { - let permission = "indexedDB-chrome-" + aName + '|' + this.stores[aName][appId].owner; - // The ReadOnly is decied by the owenr first. 
- let readOnly = this.stores[aName][appId].readOnly || aReadOnly; - this.resetPermissions(aAppId, aOrigin, aOwner, permission, readOnly); - } - }, - - resetPermissions: function(aAppId, aOrigin, aOwner, aPermission, aReadOnly) { - debug("ResetPermissions - appId: " + aAppId + " - origin: " + aOrigin + - " - owner: " + aOwner + " - permissions: " + aPermission + - " - readOnly: " + aReadOnly); - - let uri = Services.io.newURI(aOrigin, null, null); - let principal = secMan.getAppCodebasePrincipal(uri, aAppId, false); - - let result = permissionManager.testExactPermissionFromPrincipal(principal, - aPermission + '-write'); - - if (aReadOnly && result == Ci.nsIPermissionManager.ALLOW_ACTION) { - debug("Write permission removed"); - permissionManager.removeFromPrincipal(principal, aPermission + '-write'); - } else if (!aReadOnly && result != Ci.nsIPermissionManager.ALLOW_ACTION) { - debug("Write permission added"); - permissionManager.addFromPrincipal(principal, aPermission + '-write', - Ci.nsIPermissionManager.ALLOW_ACTION); - } - - result = permissionManager.testExactPermissionFromPrincipal(principal, - aPermission + '-read'); - if (result != Ci.nsIPermissionManager.ALLOW_ACTION) { - debug("Read permission added"); - permissionManager.addFromPrincipal(principal, aPermission + '-read', - Ci.nsIPermissionManager.ALLOW_ACTION); - } - - result = permissionManager.testExactPermissionFromPrincipal(principal, aPermission); - if (result != Ci.nsIPermissionManager.ALLOW_ACTION) { - debug("Generic permission added"); - permissionManager.addFromPrincipal(principal, aPermission, - Ci.nsIPermissionManager.ALLOW_ACTION); - } - }, - - getDataStores: function(aWindow, aName) { - debug('getDataStores - aName: ' + aName); - - let self = this; - return new aWindow.Promise(function(resolve, reject) { - // If this request comes from the main process, we have access to the - // window, so we can skip the ipc communication. - if (self.inParent) { - let stores = self.getDataStoresInfo(aName, aWindow.document.nodePrincipal.appId); - if (stores === null) { - reject(new aWindow.DOMError("SecurityError", "Access denied")); - return; - } - self.getDataStoreCreate(aWindow, resolve, stores); - } else { - // This method can be called in the child so we need to send a request - // to the parent and create DataStore object here. 
- new DataStoreServiceChild(aWindow, aName, function(aStores) { - debug("DataStoreServiceChild success callback!"); - self.getDataStoreCreate(aWindow, resolve, aStores); - }, function() { - debug("DataStoreServiceChild error callback!"); - reject(new aWindow.DOMError("SecurityError", "Access denied")); - }); - } - }); - }, - - getDataStoresInfo: function(aName, aAppId) { - debug('GetDataStoresInfo'); - - let appsService = Cc["@mozilla.org/AppsService;1"] - .getService(Ci.nsIAppsService); - let app = appsService.getAppByLocalId(aAppId); - if (!app) { - return null; - } - - let prefName = "dom.testing.datastore_enabled_for_hosted_apps"; - if (app.appStatus != Ci.nsIPrincipal.APP_STATUS_CERTIFIED && - (Services.prefs.getPrefType(prefName) == Services.prefs.PREF_INVALID || - !Services.prefs.getBoolPref(prefName))) { - return null; - } - - let results = []; - - if (aName in this.stores) { - if (aAppId in this.stores[aName]) { - results.push({ name: aName, - owner: this.stores[aName][aAppId].owner, - readOnly: false, - enabled: this.stores[aName][aAppId].enabled }); - } - - for (var i in this.stores[aName]) { - if (i == aAppId) { - continue; - } - - let access = this.getDataStoreAccess(aName, aAppId); - if (!access) { - continue; - } - - let readOnly = this.stores[aName][i].readOnly || access.readOnly; - results.push({ name: aName, - owner: this.stores[aName][i].owner, - readOnly: readOnly, - enabled: this.stores[aName][i].enabled }); - } - } - - return results; - }, - - getDataStoreCreate: function(aWindow, aResolve, aStores) { - debug("GetDataStoreCreate"); - - let results = new aWindow.Array(); - - if (!aStores.length) { - aResolve(results); - return; - } - - let pendingDataStores = []; - - for (let i = 0; i < aStores.length; ++i) { - if (!aStores[i].enabled) { - pendingDataStores.push(aStores[i].owner); - } - } - - if (!pendingDataStores.length) { - this.getDataStoreResolve(aWindow, aResolve, aStores); - return; - } - - if (!(aStores[0].name in this.pendingRequests)) { - this.pendingRequests[aStores[0].name] = []; - } - - this.pendingRequests[aStores[0].name].push({ window: aWindow, - resolve: aResolve, - stores: aStores, - pendingDataStores: pendingDataStores }); - }, - - getDataStoreResolve: function(aWindow, aResolve, aStores) { - debug("GetDataStoreResolve"); - - let callbackPending = aStores.length; - let results = new aWindow.Array(); - - if (!callbackPending) { - aResolve(results); - return; - } - - for (let i = 0; i < aStores.length; ++i) { - let obj = new DataStore(aWindow, aStores[i].name, - aStores[i].owner, aStores[i].readOnly); - - let storeImpl = aWindow.DataStoreImpl._create(aWindow, obj); - - let exposedStore = new aWindow.DataStore(); - exposedStore.setDataStoreImpl(storeImpl); - - obj.exposedObject = exposedStore; - - results.push(exposedStore); - - obj.retrieveRevisionId( - function() { - --callbackPending; - if (!callbackPending) { - aResolve(results); - } - } - ); - } - }, - - getDataStoreAccess: function(aName, aAppId) { - if (!(aName in this.accessStores) || - !(aAppId in this.accessStores[aName])) { - return null; - } - - return this.accessStores[aName][aAppId]; - }, - - observe: function observe(aSubject, aTopic, aData) { - debug('observe - aTopic: ' + aTopic); - if (aTopic != 'webapps-clear-data') { - return; - } - - let params = - aSubject.QueryInterface(Ci.mozIApplicationClearPrivateDataParams); - - // DataStore is explosed to apps, not browser content. 
- if (params.browserOnly) { - return; - } - - function isEmpty(aMap) { - for (var key in aMap) { - if (aMap.hasOwnProperty(key)) { - return false; - } - } - return true; - } - - for (let key in this.stores) { - if (params.appId in this.stores[key]) { - this.deleteDatabase(key, this.stores[key][params.appId].owner); - delete this.stores[key][params.appId]; - } - - if (isEmpty(this.stores[key])) { - delete this.stores[key]; - } - } - - for (let key in this.accessStores) { - if (params.appId in this.accessStores[key]) { - delete this.accessStores[key][params.appId]; - } - - if (isEmpty(this.accessStores[key])) { - delete this.accessStores[key]; - } - } - }, - - deleteDatabase: function(aName, aOwner) { - debug("delete database: " + aName); - - let db = new DataStoreDB(); - db.init(aOwner, aName); - db.delete(); - }, - - receiveMessage: function(aMsg) { - debug("receiveMessage"); - let data = aMsg.json; - - if (!(data.name in this.pendingRequests)) { - return; - } - - for (let i = 0; i < this.pendingRequests[data.name].length;) { - let pos = this.pendingRequests[data.name][i].pendingDataStores.indexOf(data.owner); - if (pos != -1) { - this.pendingRequests[data.name][i].pendingDataStores.splice(pos, 1); - if (!this.pendingRequests[data.name][i].pendingDataStores.length) { - this.getDataStoreResolve(this.pendingRequests[data.name][i].window, - this.pendingRequests[data.name][i].resolve, - this.pendingRequests[data.name][i].stores); - this.pendingRequests[data.name].splice(i, 1); - continue; - } - } - - ++i; - } - - if (!this.pendingRequests[data.name].length) { - delete this.pendingRequests[data.name]; - } - }, - - classID : DATASTORESERVICE_CID, - QueryInterface: XPCOMUtils.generateQI([Ci.nsIDataStoreService, - Ci.nsIObserver]), - classInfo: XPCOMUtils.generateCI({ - classID: DATASTORESERVICE_CID, - contractID: '@mozilla.org/datastore-service;1', - interfaces: [Ci.nsIDataStoreService, Ci.nsIObserver], - flags: Ci.nsIClassInfo.SINGLETON - }) -}; - -/* DataStoreServiceChild */ - -function DataStoreServiceChild(aWindow, aName, aSuccessCb, aErrorCb) { - debug("DataStoreServiceChild created"); - this.init(aWindow, aName, aSuccessCb, aErrorCb); -} - -DataStoreServiceChild.prototype = { - __proto__: DOMRequestIpcHelper.prototype, - - init: function(aWindow, aName, aSuccessCb, aErrorCb) { - debug("DataStoreServiceChild init"); - this._successCb = aSuccessCb; - this._errorCb = aErrorCb; - this._name = aName; - - this.initDOMRequestHelper(aWindow, [ "DataStore:Get:Return:OK", - "DataStore:Get:Return:KO" ]); - - cpmm.sendAsyncMessage("DataStore:Get", - { name: aName }, null, aWindow.document.nodePrincipal ); - }, - - receiveMessage: function(aMessage) { - debug("DataStoreServiceChild receiveMessage"); - - if (aMessage.data.name != this._name) { - return; - } - - switch (aMessage.name) { - case 'DataStore:Get:Return:OK': - this.destroyDOMRequestHelper(); - this._successCb(aMessage.data.stores); - break; - - case 'DataStore:Get:Return:KO': - this.destroyDOMRequestHelper(); - this._errorCb(); - break; - } - } -} - -this.NSGetFactory = XPCOMUtils.generateNSGetFactory([DataStoreService]); diff --git a/dom/datastore/DataStoreServiceInternal.jsm b/dom/datastore/DataStoreServiceInternal.jsm deleted file mode 100644 index f301a3793e30..000000000000 --- a/dom/datastore/DataStoreServiceInternal.jsm +++ /dev/null @@ -1,68 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. 
If a copy of the MPL was not distributed with this file, - * You can obtain one at http://mozilla.org/MPL/2.0/. */ - -"use strict" - -const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; - -this.EXPORTED_SYMBOLS = ["DataStoreServiceInternal"]; - -function debug(s) { - //dump('DEBUG DataStoreServiceInternal: ' + s + '\n'); -} - -Cu.import("resource://gre/modules/XPCOMUtils.jsm"); -Cu.import("resource://gre/modules/Services.jsm"); - -XPCOMUtils.defineLazyServiceGetter(this, "ppmm", - "@mozilla.org/parentprocessmessagemanager;1", - "nsIMessageBroadcaster"); - -XPCOMUtils.defineLazyServiceGetter(this, "dataStoreService", - "@mozilla.org/datastore-service;1", - "nsIDataStoreService"); - -this.DataStoreServiceInternal = { - init: function() { - debug("init"); - - let inParent = Cc["@mozilla.org/xre/app-info;1"].getService(Ci.nsIXULRuntime) - .processType == Ci.nsIXULRuntime.PROCESS_TYPE_DEFAULT; - if (inParent) { - ppmm.addMessageListener("DataStore:Get", this); - } - }, - - receiveMessage: function(aMessage) { - debug("receiveMessage"); - - if (aMessage.name != 'DataStore:Get') { - return; - } - - let prefName = 'dom.testing.datastore_enabled_for_hosted_apps'; - if ((Services.prefs.getPrefType(prefName) == Services.prefs.PREF_INVALID || - !Services.prefs.getBoolPref(prefName)) && - !aMessage.target.assertAppHasStatus(Ci.nsIPrincipal.APP_STATUS_CERTIFIED)) { - return; - } - - let msg = aMessage.data; - - if (!aMessage.principal || - aMessage.principal.appId == Ci.nsIScriptSecurityManager.UNKNOWN_APP_ID) { - aMessage.target.sendAsyncMessage("DataStore:Get:Return:KO"); - return; - } - - msg.stores = dataStoreService.getDataStoresInfo(msg.name, aMessage.principal.appId); - if (msg.stores === null) { - aMessage.target.sendAsyncMessage("DataStore:Get:Return:KO"); - return; - } - aMessage.target.sendAsyncMessage("DataStore:Get:Return:OK", msg); - } -} - -DataStoreServiceInternal.init(); diff --git a/dom/datastore/moz.build b/dom/datastore/moz.build index 092fc75e4795..b85c7e9bb64a 100644 --- a/dom/datastore/moz.build +++ b/dom/datastore/moz.build @@ -5,6 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ + 'nsIDataStore.idl', 'nsIDataStoreService.idl', ] @@ -13,11 +14,15 @@ XPIDL_MODULE = 'dom_datastore' EXPORTS.mozilla.dom += [ 'DataStore.h', 'DataStoreCursor.h', + 'DataStoreService.h', ] SOURCES += [ 'DataStore.cpp', 'DataStoreCursor.cpp', + 'DataStoreDB.cpp', + 'DataStoreRevision.cpp', + 'DataStoreService.cpp', ] LOCAL_INCLUDES += [ @@ -26,17 +31,18 @@ LOCAL_INCLUDES += [ EXTRA_COMPONENTS += [ 'DataStore.manifest', - 'DataStoreService.js', + 'DataStoreImpl.js', ] EXTRA_JS_MODULES += [ 'DataStoreChangeNotifier.jsm', 'DataStoreCursorImpl.jsm', 'DataStoreDB.jsm', - 'DataStoreImpl.jsm', - 'DataStoreServiceInternal.jsm', ] MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +include('/ipc/chromium/chromium-config.mozbuild') + FINAL_LIBRARY = 'xul' +FAIL_ON_WARNINGS = True diff --git a/dom/datastore/nsIDataStore.idl b/dom/datastore/nsIDataStore.idl new file mode 100644 index 000000000000..fc3d82598874 --- /dev/null +++ b/dom/datastore/nsIDataStore.idl @@ -0,0 +1,23 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "nsISupports.idl" + +interface nsIDOMWindow; + +// NOTE: This is a temporary interface. 
+// It will be removed in the next patches for rewriting DataStore in C++. +[scriptable, uuid(0b41fef5-14ba-48b0-923c-3d8fb64692ae)] +interface nsIDataStore : nsISupports +{ + void init(in nsIDOMWindow window, + in DOMString name, + in DOMString manifestURL, + in boolean readOnly); + + attribute jsval exposedObject; + + void retrieveRevisionId(in jsval cb); +}; diff --git a/dom/datastore/nsIDataStoreService.idl b/dom/datastore/nsIDataStoreService.idl index a3fc914d0520..1157164f3809 100644 --- a/dom/datastore/nsIDataStoreService.idl +++ b/dom/datastore/nsIDataStoreService.idl @@ -7,7 +7,7 @@ interface nsIDOMWindow; -[scriptable, uuid(bd02d09c-41ab-47b7-9319-57aa8e5059b0)] +[scriptable, uuid(0a050c4f-d292-4a14-8712-09bc1019840a)] interface nsIDataStoreService : nsISupports { void installDataStore(in unsigned long appId, @@ -24,12 +24,4 @@ interface nsIDataStoreService : nsISupports nsISupports getDataStores(in nsIDOMWindow window, in DOMString name); - - // This is an array of objects composed by: - // - readOnly: boolean - // - name: DOMString - // - owner: DOMString - // - enabled: true/false - true if this dataStore is ready to be used. - jsval getDataStoresInfo(in DOMString name, - in unsigned long appId); }; diff --git a/dom/indexedDB/IDBFactory.cpp b/dom/indexedDB/IDBFactory.cpp index 3eb8e99f8468..c9e5aec67ebb 100644 --- a/dom/indexedDB/IDBFactory.cpp +++ b/dom/indexedDB/IDBFactory.cpp @@ -235,9 +235,6 @@ IDBFactory::Create(ContentParent* aContentParent, NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); NS_ASSERTION(IndexedDatabaseManager::IsMainProcess(), "Wrong process!"); NS_ASSERTION(nsContentUtils::IsCallerChrome(), "Only for chrome!"); - NS_ASSERTION(aContentParent, "Null ContentParent!"); - - NS_ASSERTION(!nsContentUtils::GetCurrentJSContext(), "Should be called from C++"); // We need to get this information before we push a null principal to avoid // IsCallerChrome() assertion in quota manager. diff --git a/dom/indexedDB/IDBFactory.h b/dom/indexedDB/IDBFactory.h index e51d0900fdf1..7bf8ff6e325b 100644 --- a/dom/indexedDB/IDBFactory.h +++ b/dom/indexedDB/IDBFactory.h @@ -75,7 +75,7 @@ public: IDBFactory** aFactory); // Called when using IndexedDB from a JS component or a JSM in a different - // process. + // process or from a C++ component. 
static nsresult Create(ContentParent* aContentParent, IDBFactory** aFactory); diff --git a/dom/indexedDB/IDBRequest.cpp b/dom/indexedDB/IDBRequest.cpp index d27b0559afad..dff6ce7218b0 100644 --- a/dom/indexedDB/IDBRequest.cpp +++ b/dom/indexedDB/IDBRequest.cpp @@ -327,7 +327,7 @@ IDBRequest::WrapObject(JSContext* aCx) } JS::Value -IDBRequest::GetResult(JSContext* aCx, mozilla::ErrorResult& aRv) const +IDBRequest::GetResult(mozilla::ErrorResult& aRv) const { NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); diff --git a/dom/indexedDB/IDBRequest.h b/dom/indexedDB/IDBRequest.h index e0ca44837000..34504db644b3 100644 --- a/dom/indexedDB/IDBRequest.h +++ b/dom/indexedDB/IDBRequest.h @@ -132,7 +132,13 @@ public: } JS::Value - GetResult(JSContext* aCx, ErrorResult& aRv) const; + GetResult(ErrorResult& aRv) const; + + JS::Value + GetResult(JSContext* aCx, ErrorResult& aRv) const + { + return GetResult(aRv); + } IDBTransaction* GetTransaction() const diff --git a/dom/ipc/ContentChild.cpp b/dom/ipc/ContentChild.cpp index 3fe4a97ab31b..41c12fb3093f 100644 --- a/dom/ipc/ContentChild.cpp +++ b/dom/ipc/ContentChild.cpp @@ -142,6 +142,7 @@ #include "nsDeviceStorage.h" #include "AudioChannelService.h" #include "JavaScriptChild.h" +#include "mozilla/dom/DataStoreService.h" #include "mozilla/dom/telephony/PTelephonyChild.h" #include "mozilla/dom/time/DateCacheCleaner.h" #include "mozilla/net/NeckoMessageUtils.h" @@ -768,6 +769,24 @@ ContentChild::RecvAudioChannelNotify() return true; } +bool +ContentChild::RecvDataStoreNotify(const uint32_t& aAppId, + const nsString& aName, + const nsString& aManifestURL) +{ + nsRefPtr service = DataStoreService::GetOrCreate(); + if (NS_WARN_IF(!service)) { + return false; + } + + nsresult rv = service->EnableDataStore(aAppId, aName, aManifestURL); + if (NS_WARN_IF(NS_FAILED(rv))) { + return false; + } + + return true; +} + bool ContentChild::DeallocPMemoryReportRequestChild(PMemoryReportRequestChild* actor) { diff --git a/dom/ipc/ContentChild.h b/dom/ipc/ContentChild.h index 556cbb181773..67c1468b3550 100644 --- a/dom/ipc/ContentChild.h +++ b/dom/ipc/ContentChild.h @@ -157,6 +157,10 @@ public: virtual bool RecvAudioChannelNotify() MOZ_OVERRIDE; + virtual bool + RecvDataStoreNotify(const uint32_t& aAppId, const nsString& aName, + const nsString& aManifestURL) MOZ_OVERRIDE; + virtual PTestShellChild* AllocPTestShellChild() MOZ_OVERRIDE; virtual bool DeallocPTestShellChild(PTestShellChild*) MOZ_OVERRIDE; virtual bool RecvPTestShellConstructor(PTestShellChild*) MOZ_OVERRIDE; diff --git a/dom/ipc/ContentParent.cpp b/dom/ipc/ContentParent.cpp index 01115152302d..a0efbeb5df47 100644 --- a/dom/ipc/ContentParent.cpp +++ b/dom/ipc/ContentParent.cpp @@ -33,6 +33,7 @@ #include "mozilla/ClearOnShutdown.h" #include "mozilla/dom/asmjscache/AsmJSCache.h" #include "mozilla/dom/Element.h" +#include "mozilla/dom/DataStoreService.h" #include "mozilla/dom/ExternalHelperAppParent.h" #include "mozilla/dom/PFileDescriptorSetParent.h" #include "mozilla/dom/PCycleCollectWithLogsParent.h" @@ -1495,6 +1496,7 @@ ContentParent::InitializeMembers() mNumDestroyingTabs = 0; mIsAlive = true; mSendPermissionUpdates = false; + mSendDataStoreInfos = false; mCalledClose = false; mCalledCloseWithError = false; mCalledKillHard = false; @@ -2076,6 +2078,26 @@ ContentParent::RecvAudioChannelChangeDefVolChannel(const int32_t& aChannel, return true; } +bool +ContentParent::RecvDataStoreGetStores( + const nsString& aName, + const IPC::Principal& aPrincipal, + InfallibleTArray* aValue) +{ + nsRefPtr service = 
DataStoreService::GetOrCreate(); + if (NS_WARN_IF(!service)) { + return false; + } + + nsresult rv = service->GetDataStoresFromIPC(aName, aPrincipal, aValue); + if (NS_WARN_IF(NS_FAILED(rv))) { + return false; + } + + mSendDataStoreInfos = true; + return true; +} + bool ContentParent::RecvBroadcastVolume(const nsString& aVolumeName) { diff --git a/dom/ipc/ContentParent.h b/dom/ipc/ContentParent.h index cc0a8bae25bb..06991e72e8c2 100644 --- a/dom/ipc/ContentParent.h +++ b/dom/ipc/ContentParent.h @@ -160,10 +160,14 @@ public: int32_t Pid(); - bool NeedsPermissionsUpdate() { + bool NeedsPermissionsUpdate() const { return mSendPermissionUpdates; } + bool NeedsDataStoreInfos() const { + return mSendDataStoreInfos; + } + BlobParent* GetOrCreateActorForBlob(nsIDOMBlob* aBlob); /** @@ -526,6 +530,11 @@ private: virtual bool RecvGetSystemMemory(const uint64_t& getterId) MOZ_OVERRIDE; virtual bool RecvBroadcastVolume(const nsString& aVolumeName) MOZ_OVERRIDE; + virtual bool RecvDataStoreGetStores( + const nsString& aName, + const IPC::Principal& aPrincipal, + InfallibleTArray* aValue) MOZ_OVERRIDE; + virtual bool RecvSpeakerManagerGetSpeakerStatus(bool* aValue) MOZ_OVERRIDE; virtual bool RecvSpeakerManagerForceSpeaker(const bool& aEnable) MOZ_OVERRIDE; @@ -600,6 +609,7 @@ private: bool mIsAlive; bool mSendPermissionUpdates; + bool mSendDataStoreInfos; bool mIsForBrowser; bool mIsNuwaProcess; diff --git a/dom/ipc/PContent.ipdl b/dom/ipc/PContent.ipdl index c588a0483db1..6d7c44604318 100644 --- a/dom/ipc/PContent.ipdl +++ b/dom/ipc/PContent.ipdl @@ -262,6 +262,14 @@ struct PrefSetting { MaybePrefValue userValue; }; +struct DataStoreSetting { + nsString name; + nsString originURL; + nsString manifestURL; + bool readOnly; + bool enabled; +}; + intr protocol PContent { parent opens PCompositor; @@ -338,6 +346,9 @@ child: async SpeakerManagerNotify(); + async DataStoreNotify(uint32_t aAppId, nsString aName, + nsString aManifestURL); + /** * Dump this process's GC and CC logs to the provided files. 
* @@ -550,6 +561,9 @@ parent: async AudioChannelChangedNotification(); async AudioChannelChangeDefVolChannel(int32_t aChannel, bool aHidden); + sync DataStoreGetStores(nsString aName, Principal aPrincipal) + returns (DataStoreSetting[] dataStores); + async FilePathUpdateNotify(nsString aType, nsString aStorageName, nsString aFilepath, diff --git a/dom/ipc/TabChild.cpp b/dom/ipc/TabChild.cpp index aebdeded95eb..2db09daceb0d 100644 --- a/dom/ipc/TabChild.cpp +++ b/dom/ipc/TabChild.cpp @@ -111,8 +111,7 @@ typedef nsDataHashtable TabChildMap; static TabChildMap* sTabChildren; TabChildBase::TabChildBase() - : mOldViewportWidth(0.0f) - , mContentDocumentIsDisplayed(false) + : mContentDocumentIsDisplayed(false) , mTabChildGlobal(nullptr) , mInnerSize(0, 0) { @@ -151,7 +150,7 @@ TabChildBase::InitializeRootMetrics() void TabChildBase::SetCSSViewport(const CSSSize& aSize) { - mOldViewportWidth = aSize.width; + mOldViewportSize = aSize; if (mContentDocumentIsDisplayed) { nsCOMPtr utils(GetDOMWindowUtils()); @@ -221,10 +220,10 @@ TabChildBase::HandlePossibleViewportChange() return false; } - float oldBrowserWidth = mOldViewportWidth; + CSSSize oldBrowserSize = mOldViewportSize; mLastRootMetrics.mViewport.SizeTo(viewport); - if (!oldBrowserWidth) { - oldBrowserWidth = kDefaultViewportSize.width; + if (oldBrowserSize == CSSSize()) { + oldBrowserSize = kDefaultViewportSize; } SetCSSViewport(viewport); @@ -241,9 +240,11 @@ TabChildBase::HandlePossibleViewportChange() return false; } - float oldScreenWidth = mLastRootMetrics.mCompositionBounds.width; - if (!oldScreenWidth) { - oldScreenWidth = mInnerSize.width; + ScreenIntSize oldScreenSize = ViewAs( + mLastRootMetrics.mCompositionBounds.Size(), + PixelCastJustification::ScreenToParentLayerForRoot); + if (oldScreenSize == ScreenIntSize()) { + oldScreenSize = mInnerSize; } FrameMetrics metrics(mLastRootMetrics); @@ -266,7 +267,9 @@ TabChildBase::HandlePossibleViewportChange() // In all of these cases, we maintain how much actual content is visible // within the screen width. Note that "actual content" may be different with // respect to CSS pixels because of the CSS viewport size changing. 
- float oldIntrinsicScale = oldScreenWidth / oldBrowserWidth; + float oldIntrinsicScale = + std::max(oldScreenSize.width / oldBrowserSize.width, + oldScreenSize.height / oldBrowserSize.height); metrics.ZoomBy(metrics.CalculateIntrinsicScale().scale / oldIntrinsicScale); // Changing the zoom when we're not doing a first paint will get ignored @@ -2499,6 +2502,7 @@ TabChild::InitRenderingState() if (!sTabChildren) { sTabChildren = new TabChildMap; } + MOZ_ASSERT(!sTabChildren->Get(id)); sTabChildren->Put(id, this); mLayersId = id; } @@ -2708,14 +2712,14 @@ TabChild::GetFrom(uint64_t aLayersId) } void -TabChild::DidComposite() +TabChild::DidComposite(uint64_t aTransactionId) { MOZ_ASSERT(mWidget); MOZ_ASSERT(mWidget->GetLayerManager()); MOZ_ASSERT(mWidget->GetLayerManager()->GetBackendType() == LayersBackend::LAYERS_CLIENT); ClientLayerManager *manager = static_cast(mWidget->GetLayerManager()); - manager->DidComposite(); + manager->DidComposite(aTransactionId); } NS_IMETHODIMP diff --git a/dom/ipc/TabChild.h b/dom/ipc/TabChild.h index adbdb315bfbe..4ea95cb77677 100644 --- a/dom/ipc/TabChild.h +++ b/dom/ipc/TabChild.h @@ -212,7 +212,7 @@ protected: bool UpdateFrameHandler(const mozilla::layers::FrameMetrics& aFrameMetrics); protected: - float mOldViewportWidth; + CSSSize mOldViewportSize; bool mContentDocumentIsDisplayed; nsRefPtr mTabChildGlobal; ScreenIntSize mInnerSize; @@ -446,7 +446,7 @@ public: static TabChild* GetFrom(nsIPresShell* aPresShell); static TabChild* GetFrom(uint64_t aLayersId); - void DidComposite(); + void DidComposite(uint64_t aTransactionId); static inline TabChild* GetFrom(nsIDOMWindow* aWindow) diff --git a/dom/tests/mochitest/bugs/mochitest.ini b/dom/tests/mochitest/bugs/mochitest.ini index 17c91b20f2fd..bb062a6ada1d 100644 --- a/dom/tests/mochitest/bugs/mochitest.ini +++ b/dom/tests/mochitest/bugs/mochitest.ini @@ -68,7 +68,6 @@ skip-if = (buildapp == 'b2g' && toolkit != 'gonk') #Bug 931116, b2g desktop spec [test_bug394769.html] [test_bug396843.html] skip-if = (buildapp == 'b2g' && (toolkit != 'gonk' || debug)) -[test_bug397571.html] [test_bug400204.html] [test_bug404748.html] [test_bug406375.html] diff --git a/dom/tests/mochitest/bugs/test_bug397571.html b/dom/tests/mochitest/bugs/test_bug397571.html deleted file mode 100644 index e90cf0e4a31f..000000000000 --- a/dom/tests/mochitest/bugs/test_bug397571.html +++ /dev/null @@ -1,136 +0,0 @@ - - - - - Test for Bug 397571 - - - - - - Mozilla Bug 397571 -
- - diff --git a/dom/tests/mochitest/general/test_interfaces.html b/dom/tests/mochitest/general/test_interfaces.html index e5fd0810c97e..112ddf590dfe 100644 --- a/dom/tests/mochitest/general/test_interfaces.html +++ b/dom/tests/mochitest/general/test_interfaces.html @@ -1200,8 +1200,6 @@ var interfaceNamesInGlobalScope = "WheelEvent", // IMPORTANT: Do not change this list without review from a DOM peer! "Window", -// IMPORTANT: Do not change this list without review from a DOM peer! - "WindowUtils", // IMPORTANT: Do not change this list without review from a DOM peer! "Worker", // IMPORTANT: Do not change this list without review from a DOM peer! diff --git a/dom/webidl/DataStore.webidl b/dom/webidl/DataStore.webidl index dca5f69a7554..9694371833be 100644 --- a/dom/webidl/DataStore.webidl +++ b/dom/webidl/DataStore.webidl @@ -108,3 +108,10 @@ dictionary DataStoreTask { DataStoreKey? id; any data; }; + +// For internal use. +dictionary DataStoreRevisionData { + DOMString revisionId = ""; + unsigned long objectId = 0; + DOMString operation = ""; +}; diff --git a/gfx/gl/GLReadTexImageHelper.cpp b/gfx/gl/GLReadTexImageHelper.cpp index 9e633da055a2..9096b26fe67b 100644 --- a/gfx/gl/GLReadTexImageHelper.cpp +++ b/gfx/gl/GLReadTexImageHelper.cpp @@ -637,21 +637,18 @@ GLReadTexImageHelper::ReadTexImage(GLuint aTextureId, /* Setup quad geometry */ mGL->fBindBuffer(LOCAL_GL_ARRAY_BUFFER, 0); - mGL->fEnableVertexAttribArray(0); - mGL->fEnableVertexAttribArray(1); float w = (aTextureTarget == LOCAL_GL_TEXTURE_RECTANGLE) ? (float) aSize.width : 1.0f; float h = (aTextureTarget == LOCAL_GL_TEXTURE_RECTANGLE) ? (float) aSize.height : 1.0f; - const float vertexArray[4*2] = { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f - }; - mGL->fVertexAttribPointer(0, 2, LOCAL_GL_FLOAT, LOCAL_GL_FALSE, 0, vertexArray); + }; + ScopedVertexAttribPointer autoAttrib0(mGL, 0, 2, LOCAL_GL_FLOAT, LOCAL_GL_FALSE, 0, 0, vertexArray); const float u0 = 0.0f; const float u1 = w; @@ -661,7 +658,7 @@ GLReadTexImageHelper::ReadTexImage(GLuint aTextureId, u1, v0, u0, v1, u1, v1 }; - mGL->fVertexAttribPointer(1, 2, LOCAL_GL_FLOAT, LOCAL_GL_FALSE, 0, texCoordArray); + ScopedVertexAttribPointer autoAttrib1(mGL, 1, 2, LOCAL_GL_FLOAT, LOCAL_GL_FALSE, 0, 0, texCoordArray); /* Bind the texture */ if (aTextureId) { @@ -677,16 +674,12 @@ GLReadTexImageHelper::ReadTexImage(GLuint aTextureId, mGL->fDrawArrays(LOCAL_GL_TRIANGLE_STRIP, 0, 4); CLEANUP_IF_GLERROR_OCCURRED("when drawing texture"); - mGL->fDisableVertexAttribArray(1); - mGL->fDisableVertexAttribArray(0); - /* Read-back draw results */ ReadPixelsIntoDataSurface(mGL, isurf); CLEANUP_IF_GLERROR_OCCURRED("when reading pixels into surface"); } while (false); /* Restore GL state */ -//cleanup: mGL->fBindRenderbuffer(LOCAL_GL_RENDERBUFFER, oldrb); mGL->fBindFramebuffer(LOCAL_GL_FRAMEBUFFER, oldfb); mGL->fUseProgram(oldprog); diff --git a/gfx/layers/FrameMetrics.h b/gfx/layers/FrameMetrics.h index c9e7f5c59585..15582cb6ba49 100644 --- a/gfx/layers/FrameMetrics.h +++ b/gfx/layers/FrameMetrics.h @@ -187,7 +187,9 @@ public: // into its composition bounds. 
CSSToScreenScale CalculateIntrinsicScale() const { - return CSSToScreenScale(float(mCompositionBounds.width) / float(mViewport.width)); + return CSSToScreenScale( + std::max(float(mCompositionBounds.width) / mViewport.width, + float(mCompositionBounds.height) / mViewport.height)); } // Return the scale factor for converting from CSS pixels (for this layer) diff --git a/gfx/layers/TransactionIdAllocator.h b/gfx/layers/TransactionIdAllocator.h new file mode 100644 index 000000000000..7cddd9f2fdb5 --- /dev/null +++ b/gfx/layers/TransactionIdAllocator.h @@ -0,0 +1,52 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef GFX_TRANSACTION_ID_ALLOCATOR_H +#define GFX_TRANSACTION_ID_ALLOCATOR_H + +#include "nsISupportsImpl.h" + +namespace mozilla { +namespace layers { + +class TransactionIdAllocator { +public: + NS_INLINE_DECL_REFCOUNTING(TransactionIdAllocator) + + virtual ~TransactionIdAllocator() {} + + /** + * Allocate a unique id number for the current refresh tick, can + * only be called while IsInRefresh(). + * + * If too many id's are allocated without being returned then + * the refresh driver will suspend until they catch up. + */ + virtual uint64_t GetTransactionId() = 0; + + /** + * Notify that all work (including asynchronous composites) + * for a given transaction id has been completed. + * + * If the refresh driver has been suspended because + * of having too many outstanding id's, then this may + * resume it. + */ + virtual void NotifyTransactionCompleted(uint64_t aTransactionId) = 0; + + /** + * Revoke a transaction id that isn't needed to track + * completion of asynchronous work. This is similar + * to NotifyTransactionCompleted except avoids + * return ordering issues. 
+ */ + virtual void RevokeTransactionId(uint64_t aTransactionId) = 0; +}; + +} +} + + +#endif /* GFX_TRANSACTION_ID_ALLOCATOR_H */ diff --git a/gfx/layers/apz/src/AsyncPanZoomController.cpp b/gfx/layers/apz/src/AsyncPanZoomController.cpp index f75eb887fe59..25d042569af3 100644 --- a/gfx/layers/apz/src/AsyncPanZoomController.cpp +++ b/gfx/layers/apz/src/AsyncPanZoomController.cpp @@ -2101,6 +2101,8 @@ void AsyncPanZoomController::NotifyLayersUpdated(const FrameMetrics& aLayerMetri mFrameMetrics.mHasScrollgrab = aLayerMetrics.mHasScrollgrab; if (scrollOffsetUpdated) { + CancelAnimation(); + APZC_LOG("%p updating scroll offset from (%f, %f) to (%f, %f)\n", this, mFrameMetrics.GetScrollOffset().x, mFrameMetrics.GetScrollOffset().y, aLayerMetrics.GetScrollOffset().x, aLayerMetrics.GetScrollOffset().y); diff --git a/gfx/layers/apz/src/Axis.cpp b/gfx/layers/apz/src/Axis.cpp index b6e14e8f84a4..addc90cb5f9d 100644 --- a/gfx/layers/apz/src/Axis.cpp +++ b/gfx/layers/apz/src/Axis.cpp @@ -203,7 +203,7 @@ void Axis::CancelTouch() { } bool Axis::CanScroll() const { - return GetCompositionLength() < GetPageLength(); + return GetPageLength() - GetCompositionLength() > COORDINATE_EPSILON; } bool Axis::CanScrollNow() const { diff --git a/gfx/layers/client/ClientLayerManager.cpp b/gfx/layers/client/ClientLayerManager.cpp index 628b580b929d..f4bdd1ea89cb 100644 --- a/gfx/layers/client/ClientLayerManager.cpp +++ b/gfx/layers/client/ClientLayerManager.cpp @@ -40,6 +40,7 @@ using namespace mozilla::gfx; ClientLayerManager::ClientLayerManager(nsIWidget* aWidget) : mPhase(PHASE_NONE) , mWidget(aWidget) + , mLatestTransactionId(0) , mTargetRotation(ROTATION_0) , mRepeatTransaction(false) , mIsRepeatTransaction(false) @@ -54,6 +55,9 @@ ClientLayerManager::ClientLayerManager(nsIWidget* aWidget) ClientLayerManager::~ClientLayerManager() { + if (mTransactionIdAllocator) { + DidComposite(mLatestTransactionId); + } ClearCachedResources(); // Stop receiveing AsyncParentMessage at Forwarder. // After the call, the message is directly handled by LayerTransactionChild. @@ -285,7 +289,7 @@ ClientLayerManager::Composite() } void -ClientLayerManager::DidComposite() +ClientLayerManager::DidComposite(uint64_t aTransactionId) { MOZ_ASSERT(mWidget); nsIWidgetListener *listener = mWidget->GetWidgetListener(); @@ -296,6 +300,7 @@ ClientLayerManager::DidComposite() if (listener) { listener->DidCompositeWindow(); } + mTransactionIdAllocator->NotifyTransactionCompleted(aTransactionId); } void @@ -421,11 +426,13 @@ ClientLayerManager::ForwardTransaction(bool aScheduleComposite) { mPhase = PHASE_FORWARD; + mLatestTransactionId = mTransactionIdAllocator->GetTransactionId(); + // forward this transaction's changeset to our LayerManagerComposite bool sent; AutoInfallibleTArray<EditReply, 10> replies; if (HasShadowManager() && mForwarder->EndTransaction(&replies, mRegionToClear, - aScheduleComposite, mPaintSequenceNumber, &sent)) { + mLatestTransactionId, aScheduleComposite, mPaintSequenceNumber, &sent)) { for (nsTArray<EditReply>::size_type i = 0; i < replies.Length(); ++i) { const EditReply& reply = replies[i]; @@ -479,6 +486,12 @@ ClientLayerManager::ForwardTransaction(bool aScheduleComposite) if (sent) { mNeedsComposite = false; } + if (!sent || mForwarder->GetShadowManager()->HasNoCompositor()) { + // Revoke the id: either the transaction wasn't sent, or it went to a + // target without a compositor, so no DidComposite will ever be + // returned for it.
+ mTransactionIdAllocator->RevokeTransactionId(mLatestTransactionId); + } } else if (HasShadowManager()) { NS_WARNING("failed to forward Layers transaction"); } diff --git a/gfx/layers/client/ClientLayerManager.h b/gfx/layers/client/ClientLayerManager.h index 958a59407da8..24a61a651b92 100644 --- a/gfx/layers/client/ClientLayerManager.h +++ b/gfx/layers/client/ClientLayerManager.h @@ -24,6 +24,7 @@ #include "nsRect.h" // for nsIntRect #include "nsTArray.h" // for nsTArray #include "nscore.h" // for nsAString +#include "mozilla/layers/TransactionIdAllocator.h" class nsIWidget; @@ -166,7 +167,7 @@ public: virtual bool RequestOverfill(mozilla::dom::OverfillCallback* aCallback) MOZ_OVERRIDE; virtual void RunOverfillCallback(const uint32_t aOverfill) MOZ_OVERRIDE; - virtual void DidComposite(); + virtual void DidComposite(uint64_t aTransactionId); virtual bool SupportsMixBlendModes(EnumSet<gfx::CompositionOp>& aMixBlendModes) MOZ_OVERRIDE { @@ -210,6 +211,8 @@ public: // Get a copy of the compositor-side APZ test data for our layers ID. void GetCompositorSideAPZTestData(APZTestData* aData) const; + void SetTransactionIdAllocator(TransactionIdAllocator* aAllocator) { mTransactionIdAllocator = aAllocator; } + protected: enum TransactionPhase { PHASE_NONE, PHASE_CONSTRUCTION, PHASE_DRAWING, PHASE_FORWARD }; @@ -256,6 +259,9 @@ private: // back to mShadowTarget. nsRefPtr<gfxContext> mShadowTarget; + nsRefPtr<TransactionIdAllocator> mTransactionIdAllocator; + uint64_t mLatestTransactionId; + // Sometimes we draw to targets that don't natively support // landscape/portrait orientation. When we need to implement that // ourselves, |mTargetRotation| describes the induced transform we diff --git a/gfx/layers/ipc/CompositorChild.cpp b/gfx/layers/ipc/CompositorChild.cpp index b74684d7c9eb..fa8f7a7828e4 100644 --- a/gfx/layers/ipc/CompositorChild.cpp +++ b/gfx/layers/ipc/CompositorChild.cpp @@ -124,15 +124,15 @@ CompositorChild::RecvInvalidateAll() } bool -CompositorChild::RecvDidComposite(const uint64_t& aId) +CompositorChild::RecvDidComposite(const uint64_t& aId, const uint64_t& aTransactionId) { if (mLayerManager) { MOZ_ASSERT(aId == 0); - mLayerManager->DidComposite(); + mLayerManager->DidComposite(aTransactionId); } else if (aId != 0) { dom::TabChild *child = dom::TabChild::GetFrom(aId); if (child) { - child->DidComposite(); + child->DidComposite(aTransactionId); } } return true; } diff --git a/gfx/layers/ipc/CompositorChild.h b/gfx/layers/ipc/CompositorChild.h index 555fb53c6e19..0f5508a17396 100644 --- a/gfx/layers/ipc/CompositorChild.h +++ b/gfx/layers/ipc/CompositorChild.h @@ -58,7 +58,7 @@ public: virtual bool RecvOverfill(const uint32_t &aOverfill) MOZ_OVERRIDE; void AddOverfillObserver(ClientLayerManager* aLayerManager); - virtual bool RecvDidComposite(const uint64_t& aId) MOZ_OVERRIDE; + virtual bool RecvDidComposite(const uint64_t& aId, const uint64_t& aTransactionId) MOZ_OVERRIDE; private: // Private destructor, to discourage deletion outside of Release(): diff --git a/gfx/layers/ipc/CompositorParent.cpp b/gfx/layers/ipc/CompositorParent.cpp index 8409ca603e5c..1dde31028f10 100644 --- a/gfx/layers/ipc/CompositorParent.cpp +++ b/gfx/layers/ipc/CompositorParent.cpp @@ -66,6 +66,7 @@ CompositorParent::LayerTreeState::LayerTreeState() : mParent(nullptr) , mLayerManager(nullptr) , mCrossProcessParent(nullptr) + , mLayerTree(nullptr) { } @@ -193,6 +194,7 @@ CompositorParent::CompositorParent(nsIWidget* aWidget, : mWidget(aWidget) , mCurrentCompositeTask(nullptr) , mIsTesting(false) + , mPendingTransaction(0) , mPaused(false) ,
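ClientLayerManager above now talks to a TransactionIdAllocator through SetTransactionIdAllocator. A minimal sketch of an implementation of that interface (SimpleTransactionIdAllocator is a hypothetical name for illustration; the allocator this patch actually installs is nsRefreshDriver, wired up by MaybeSetupTransactionIdAllocator further down):

  #include "mozilla/layers/TransactionIdAllocator.h"

  namespace mozilla {
  namespace layers {

  // Sketch only: a trivial monotonic allocator with no throttling.
  class SimpleTransactionIdAllocator MOZ_FINAL : public TransactionIdAllocator
  {
  public:
    SimpleTransactionIdAllocator() : mNextId(0) {}

    virtual uint64_t GetTransactionId() MOZ_OVERRIDE
    {
      return ++mNextId;   // ids are strictly increasing per allocator
    }

    virtual void NotifyTransactionCompleted(uint64_t aTransactionId) MOZ_OVERRIDE
    {
      // A throttling allocator (like nsRefreshDriver below) would resume
      // skipped paints here.
    }

    virtual void RevokeTransactionId(uint64_t aTransactionId) MOZ_OVERRIDE
    {
      --mNextId;          // the id was never forwarded anywhere
    }

  private:
    uint64_t mNextId;
  };

  } // namespace layers
  } // namespace mozilla

Usage would mirror the setter added above: nsRefPtr<layers::TransactionIdAllocator> allocator = new layers::SimpleTransactionIdAllocator(); clientLayerManager->SetTransactionIdAllocator(allocator);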
mUseExternalSurfaceSize(aUseExternalSurfaceSize) , mEGLSurfaceSize(aSurfaceWidth, aSurfaceHeight) @@ -200,7 +202,6 @@ CompositorParent::CompositorParent(nsIWidget* aWidget, , mResumeCompositionMonitor("ResumeCompositionMonitor") , mOverrideComposeReadiness(false) , mForceCompositionTask(nullptr) - , mWantDidCompositeEvent(false) { NS_ABORT_IF_FALSE(sCompositorThread != nullptr || sCompositorThreadID, "The compositor thread must be Initialized before instanciating a COmpositorParent."); @@ -417,6 +418,7 @@ CompositorParent::PauseComposition() mPaused = true; mCompositor->Pause(); + DidComposite(); } // if anyone's waiting to make sure that composition really got paused, tell them @@ -443,7 +445,7 @@ CompositorParent::ResumeComposition() mPaused = false; - Composite(); + CompositeToTarget(nullptr); // if anyone's waiting to make sure that composition really got resumed, tell them lock.NotifyAll(); @@ -541,8 +543,6 @@ CompositorParent::NotifyShadowTreeTransaction(uint64_t aId, bool aIsFirstPaint, if (aScheduleComposite) { ScheduleComposition(); } - - mWantDidCompositeEvent = true; } // Used when layout.frame_rate is -1. Needs to be kept in sync with @@ -585,7 +585,7 @@ CompositorParent::ScheduleComposition() rate == 0 ? 0.0 : std::max(0.0, 1000.0 / rate)); - mCurrentCompositeTask = NewRunnableMethod(this, &CompositorParent::Composite); + mCurrentCompositeTask = NewRunnableMethod(this, &CompositorParent::CompositeCallback); if (!initialComposition && delta < minFrameDelta) { TimeDuration delay = minFrameDelta - delta; @@ -602,8 +602,9 @@ CompositorParent::ScheduleComposition() } void -CompositorParent::Composite() +CompositorParent::CompositeCallback() { + mCurrentCompositeTask = nullptr; CompositeToTarget(nullptr); } @@ -624,14 +625,10 @@ CompositorParent::CompositeToTarget(DrawTarget* aTarget, const nsIntRect* aRect) } #endif - if (mCurrentCompositeTask) { - mCurrentCompositeTask->Cancel(); - mCurrentCompositeTask = nullptr; - } - mLastCompose = TimeStamp::Now(); if (!CanComposite()) { + DidComposite(); return; } @@ -672,9 +669,8 @@ CompositorParent::CompositeToTarget(DrawTarget* aTarget, const nsIntRect* aRect) mLayerManager->SetDebugOverlayWantsNextFrame(false); mLayerManager->EndEmptyTransaction(); - if (!aTarget && mWantDidCompositeEvent) { + if (!aTarget) { DidComposite(); - mWantDidCompositeEvent = false; } if (mLayerManager->DebugOverlayWantsNextFrame()) { @@ -704,20 +700,6 @@ CompositorParent::CompositeToTarget(DrawTarget* aTarget, const nsIntRect* aRect) profiler_tracing("Paint", "Composite", TRACING_INTERVAL_END); } -void -CompositorParent::DidComposite() -{ - unused << SendDidComposite(0); - - for (LayerTreeMap::iterator it = sIndirectLayerTrees.begin(); - it != sIndirectLayerTrees.end(); it++) { - LayerTreeState* lts = &it->second; - if (lts->mParent == this && lts->mCrossProcessParent) { - unused << lts->mCrossProcessParent->SendDidComposite(it->first); - } - } -} - void CompositorParent::ForceComposeToTarget(DrawTarget* aTarget, const nsIntRect* aRect) { @@ -773,6 +755,7 @@ CompositorParent::ScheduleRotationOnCompositorThread(const TargetConfig& aTarget void CompositorParent::ShadowLayersUpdated(LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, @@ -796,11 +779,17 @@ CompositorParent::ShadowLayersUpdated(LayerTransactionParent* aLayerTree, mRootLayerTreeID, aPaintSequenceNumber); } + MOZ_ASSERT(aTransactionId > mPendingTransaction); + mPendingTransaction = aTransactionId; + if 
(root) { SetShadowProperties(root); } if (aScheduleComposite) { ScheduleComposition(); + if (mPaused) { + DidComposite(); + } // When testing we synchronously update the shadow tree with the animated // values to avoid race conditions when calling GetAnimationTransform etc. // (since the above SetShadowProperties will remove animation effects). @@ -813,11 +802,12 @@ mCompositionManager->TransformShadowTree(mTestTime); if (!requestNextFrame) { CancelCurrentCompositeTask(); + // Pretend we composited in case someone is waiting for this event. + DidComposite(); } } } mLayerManager->NotifyShadowTreeTransaction(); - mWantDidCompositeEvent = true; } void @@ -843,6 +833,8 @@ CompositorParent::SetTestSampleTime(LayerTransactionParent* aLayerTree, bool requestNextFrame = mCompositionManager->TransformShadowTree(aTime); if (!requestNextFrame) { CancelCurrentCompositeTask(); + // Pretend we composited in case someone is waiting for this event. + DidComposite(); } } @@ -1151,6 +1143,7 @@ public: virtual bool DeallocPLayerTransactionParent(PLayerTransactionParent* aLayers) MOZ_OVERRIDE; virtual void ShadowLayersUpdated(LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, @@ -1164,6 +1157,7 @@ public: virtual AsyncCompositionManager* GetCompositionManager(LayerTransactionParent* aParent) MOZ_OVERRIDE; + void DidComposite(uint64_t aId); private: // Private destructor, to discourage deletion outside of Release(): virtual ~CrossProcessCompositorParent(); @@ -1179,6 +1173,23 @@ private: base::ProcessId mChildProcessId; }; +void +CompositorParent::DidComposite() +{ + if (mPendingTransaction) { + unused << SendDidComposite(0, mPendingTransaction); + mPendingTransaction = 0; + } + + for (LayerTreeMap::iterator it = sIndirectLayerTrees.begin(); + it != sIndirectLayerTrees.end(); it++) { + LayerTreeState* lts = &it->second; + if (lts->mParent == this && lts->mCrossProcessParent) { + static_cast<CrossProcessCompositorParent*>(lts->mCrossProcessParent)->DidComposite(it->first); + } + } +} + static void OpenCompositor(CrossProcessCompositorParent* aCompositor, Transport* aTransport, ProcessHandle aHandle, @@ -1253,7 +1264,6 @@ RemoveIndirectTree(uint64_t aId) void CrossProcessCompositorParent::ActorDestroy(ActorDestroyReason aWhy) { - fprintf(stderr, " --- CrossProcessCompositorParent ActorDestroy\n"); MessageLoop::current()->PostTask( FROM_HERE, NewRunnableMethod(this, &CrossProcessCompositorParent::DeferredDestroy)); @@ -1280,6 +1290,7 @@ CrossProcessCompositorParent::AllocPLayerTransactionParent(const nsTArrayAddIPDLReference(); + sIndirectLayerTrees[aId].mLayerTree = p; return p; } @@ -1317,6 +1328,7 @@ CrossProcessCompositorParent::RecvNotifyChildCreated(const uint64_t& child) void CrossProcessCompositorParent::ShadowLayersUpdated( LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, @@ -1341,6 +1353,17 @@ CrossProcessCompositorParent::ShadowLayersUpdated( state->mParent->NotifyShadowTreeTransaction(id, aIsFirstPaint, aScheduleComposite, aPaintSequenceNumber); + aLayerTree->SetPendingTransactionId(aTransactionId); +} + +void +CrossProcessCompositorParent::DidComposite(uint64_t aId) +{ + LayerTransactionParent *layerTree = sIndirectLayerTrees[aId].mLayerTree; + if (layerTree && layerTree->GetPendingTransactionId()) { + unused << SendDidComposite(aId,
layerTree->GetPendingTransactionId()); + layerTree->SetPendingTransactionId(0); + } } void @@ -1406,8 +1429,6 @@ CrossProcessCompositorParent::GetCompositionManager(LayerTransactionParent* aLay void CrossProcessCompositorParent::DeferredDestroy() { - - fprintf(stderr, " --- CrossProcessCompositorParent DeferredDestroy\n"); CrossProcessCompositorParent* self; mSelfRef.forget(&self); @@ -1418,8 +1439,6 @@ CrossProcessCompositorParent::DeferredDestroy() CrossProcessCompositorParent::~CrossProcessCompositorParent() { - fprintf(stderr, " --- CrossProcessCompositorParent destructor\n"); - XRE_GetIOMessageLoop()->PostTask(FROM_HERE, new DeleteTask<Transport>(mTransport)); } diff --git a/gfx/layers/ipc/CompositorParent.h b/gfx/layers/ipc/CompositorParent.h index d9627800a5e2..56c335c20306 100644 --- a/gfx/layers/ipc/CompositorParent.h +++ b/gfx/layers/ipc/CompositorParent.h @@ -96,6 +96,7 @@ public: virtual void ActorDestroy(ActorDestroyReason why) MOZ_OVERRIDE; virtual void ShadowLayersUpdated(LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, @@ -226,6 +227,7 @@ public: PCompositorParent* mCrossProcessParent; TargetConfig mTargetConfig; APZTestData mApzTestData; + LayerTransactionParent* mLayerTree; }; /** @@ -253,7 +255,7 @@ protected: bool* aSuccess) MOZ_OVERRIDE; virtual bool DeallocPLayerTransactionParent(PLayerTransactionParent* aLayers) MOZ_OVERRIDE; virtual void ScheduleTask(CancelableTask*, int); - void Composite(); + void CompositeCallback(); void CompositeToTarget(gfx::DrawTarget* aTarget, const nsIntRect* aRect = nullptr); void ForceComposeToTarget(gfx::DrawTarget* aTarget, const nsIntRect* aRect = nullptr); @@ -327,6 +329,8 @@ protected: TimeStamp mExpectedComposeStartTime; #endif + uint64_t mPendingTransaction; + bool mPaused; bool mUseExternalSurfaceSize; @@ -343,8 +347,6 @@ protected: nsRefPtr<APZCTreeManager> mApzcTreeManager; - bool mWantDidCompositeEvent; - DISALLOW_EVIL_CONSTRUCTORS(CompositorParent); }; diff --git a/gfx/layers/ipc/LayerTransactionChild.h b/gfx/layers/ipc/LayerTransactionChild.h index 0ad44bf48bb9..9619da42fe05 100644 --- a/gfx/layers/ipc/LayerTransactionChild.h +++ b/gfx/layers/ipc/LayerTransactionChild.h @@ -39,6 +39,9 @@ public: bool IPCOpen() const { return mIPCOpen; } + void SetHasNoCompositor() { mHasNoCompositor = true; } + bool HasNoCompositor() { return mHasNoCompositor; } + void SetForwarder(ShadowLayerForwarder* aForwarder) { mForwarder = aForwarder; } @@ -46,9 +49,10 @@ public: protected: LayerTransactionChild() - : mIPCOpen(false) + : mForwarder(nullptr) + , mIPCOpen(false) , mDestroyed(false) - , mForwarder(nullptr) + , mHasNoCompositor(false) {} ~LayerTransactionChild() { } @@ -80,9 +84,10 @@ protected: friend class CompositorChild; friend class layout::RenderFrameChild; + ShadowLayerForwarder* mForwarder; bool mIPCOpen; bool mDestroyed; - ShadowLayerForwarder* mForwarder; + bool mHasNoCompositor; }; } // namespace layers diff --git a/gfx/layers/ipc/LayerTransactionParent.cpp b/gfx/layers/ipc/LayerTransactionParent.cpp index 680c9c2497fb..f53a84ef887a 100644 --- a/gfx/layers/ipc/LayerTransactionParent.cpp +++ b/gfx/layers/ipc/LayerTransactionParent.cpp @@ -148,6 +148,7 @@ LayerTransactionParent::LayerTransactionParent(LayerManagerComposite* aManager, : mLayerManager(aManager) , mShadowLayersManager(aLayersManager) , mId(aId) + , mPendingTransaction(0) , mChildProcessId(aOtherProcess) , mDestroyed(false) , mIPCOpen(false) @@ -179,17 +180,19 @@
LayerTransactionParent::GetCompositorBackendType() const bool LayerTransactionParent::RecvUpdateNoSwap(const InfallibleTArray<Edit>& cset, + const uint64_t& aTransactionId, const TargetConfig& targetConfig, const bool& isFirstPaint, const bool& scheduleComposite, const uint32_t& paintSequenceNumber) { - return RecvUpdate(cset, targetConfig, isFirstPaint, scheduleComposite, - paintSequenceNumber, nullptr); + return RecvUpdate(cset, aTransactionId, targetConfig, isFirstPaint, + scheduleComposite, paintSequenceNumber, nullptr); } bool LayerTransactionParent::RecvUpdate(const InfallibleTArray<Edit>& cset, + const uint64_t& aTransactionId, const TargetConfig& targetConfig, const bool& isFirstPaint, const bool& scheduleComposite, @@ -549,8 +552,8 @@ LayerTransactionParent::RecvUpdate(const InfallibleTArray<Edit>& cset, // other's buffer contents. LayerManagerComposite::PlatformSyncBeforeReplyUpdate(); - mShadowLayersManager->ShadowLayersUpdated(this, targetConfig, isFirstPaint, - scheduleComposite, paintSequenceNumber); + mShadowLayersManager->ShadowLayersUpdated(this, aTransactionId, targetConfig, + isFirstPaint, scheduleComposite, paintSequenceNumber); #ifdef COMPOSITOR_PERFORMANCE_WARNING int compositeTime = (int)(mozilla::TimeStamp::Now() - updateStart).ToMilliseconds(); diff --git a/gfx/layers/ipc/LayerTransactionParent.h b/gfx/layers/ipc/LayerTransactionParent.h index a3deeb8d9098..c9329bd91207 100644 --- a/gfx/layers/ipc/LayerTransactionParent.h +++ b/gfx/layers/ipc/LayerTransactionParent.h @@ -81,6 +81,9 @@ public: virtual bool IsSameProcess() const MOZ_OVERRIDE; + const uint64_t& GetPendingTransactionId() { return mPendingTransaction; } + void SetPendingTransactionId(uint64_t aId) { mPendingTransaction = aId; } + // CompositableParentManager virtual void SendFenceHandle(AsyncTransactionTracker* aTracker, PTextureParent* aTexture, @@ -95,6 +98,7 @@ public: protected: virtual bool RecvUpdate(const EditArray& cset, + const uint64_t& aTransactionId, const TargetConfig& targetConfig, const bool& isFirstPaint, const bool& scheduleComposite, @@ -102,6 +106,7 @@ protected: EditReplyArray* reply) MOZ_OVERRIDE; virtual bool RecvUpdateNoSwap(const EditArray& cset, + const uint64_t& aTransactionId, const TargetConfig& targetConfig, const bool& isFirstPaint, const bool& scheduleComposite, @@ -164,6 +169,8 @@ private: // mId != 0 => mRoot == null // because the "real tree" is owned by the compositor. uint64_t mId; + + uint64_t mPendingTransaction; // When the widget/frame/browser stuff in this process begins its // destruction process, we need to Disconnect() all the currently // live shadow layers, because some of them might be orphaned from diff --git a/gfx/layers/ipc/PCompositor.ipdl b/gfx/layers/ipc/PCompositor.ipdl index fe8e3f647844..85cb18a11ec9 100644 --- a/gfx/layers/ipc/PCompositor.ipdl +++ b/gfx/layers/ipc/PCompositor.ipdl @@ -42,7 +42,7 @@ child: // The compositor completed a layers transaction. id is the layers id // of the child layer tree that was composited (or 0 when notifying // the root layer tree). - async DidComposite(uint64_t id); + async DidComposite(uint64_t id, uint64_t transactionId); // The parent sends the child the requested fill ratio numbers.
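With the extra parameter on DidComposite above, a transaction id now makes a full round trip between the two processes. A comment-only sketch of the flow, using only names introduced by this patch:

  // content process                       compositor process
  // mLatestTransactionId =
  //   mTransactionIdAllocator->GetTransactionId();
  // SendUpdate(cset, mLatestTransactionId, ...)   -->
  //                                        mPendingTransaction = aTransactionId;
  //                                        ...composite...
  //               <--  SendDidComposite(id, mPendingTransaction)
  // mTransactionIdAllocator->NotifyTransactionCompleted(aTransactionId);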
async Overfill(uint32_t aOverfill); diff --git a/gfx/layers/ipc/PLayerTransaction.ipdl b/gfx/layers/ipc/PLayerTransaction.ipdl index 0adebfc0a9c2..8167cbd093ab 100644 --- a/gfx/layers/ipc/PLayerTransaction.ipdl +++ b/gfx/layers/ipc/PLayerTransaction.ipdl @@ -52,9 +52,14 @@ parent: // The isFirstPaint flag can be used to indicate that this is the first update // for a particular document. - sync Update(Edit[] cset, TargetConfig targetConfig, bool isFirstPaint, + sync Update(Edit[] cset, uint64_t id, TargetConfig targetConfig, bool isFirstPaint, bool scheduleComposite, uint32_t paintSequenceNumber) returns (EditReply[] reply); + + // We don't need to send a sync transaction if + // no operation in the transaction requires a swap. + async UpdateNoSwap(Edit[] cset, uint64_t id, TargetConfig targetConfig, bool isFirstPaint, + bool scheduleComposite, uint32_t paintSequenceNumber); // Testing APIs @@ -78,11 +83,6 @@ parent: // Useful for testing rendering of async scrolling. async SetAsyncScrollOffset(PLayer layer, int32_t x, int32_t y); - // We don't need to send a sync transaction if - // no transaction operate require a swap. - async UpdateNoSwap(Edit[] cset, TargetConfig targetConfig, bool isFirstPaint, - bool scheduleComposite, uint32_t paintSequenceNumber); - // Drop any front buffers that might be retained on the compositor // side. async ClearCachedResources(); diff --git a/gfx/layers/ipc/ShadowLayers.cpp b/gfx/layers/ipc/ShadowLayers.cpp index 05f843f78db8..6ad02f89748b 100644 --- a/gfx/layers/ipc/ShadowLayers.cpp +++ b/gfx/layers/ipc/ShadowLayers.cpp @@ -451,12 +451,15 @@ ShadowLayerForwarder::RemoveTexture(TextureClient* aTexture) bool ShadowLayerForwarder::EndTransaction(InfallibleTArray<EditReply>* aReplies, const nsIntRegion& aRegionToClear, + uint64_t aId, bool aScheduleComposite, uint32_t aPaintSequenceNumber, bool* aSent) { *aSent = false; + MOZ_ASSERT(aId); + PROFILER_LABEL("ShadowLayerForwarder", "EndTranscation"); RenderTraceScope rendertrace("Foward Transaction", "000091"); NS_ABORT_IF_FALSE(HasShadowManager(), "no manager to forward to"); @@ -562,7 +565,7 @@ ShadowLayerForwarder::EndTransaction(InfallibleTArray<EditReply>* aReplies, RenderTraceScope rendertrace3("Forward Transaction", "000093"); if (!HasShadowManager() || !mShadowManager->IPCOpen() || - !mShadowManager->SendUpdate(cset, targetConfig, mIsFirstPaint, + !mShadowManager->SendUpdate(cset, aId, targetConfig, mIsFirstPaint, aScheduleComposite, aPaintSequenceNumber, aReplies)) { MOZ_LAYERS_LOG(("[LayersForwarder] WARNING: sending transaction failed!")); @@ -575,7 +578,7 @@ ShadowLayerForwarder::EndTransaction(InfallibleTArray<EditReply>* aReplies, RenderTraceScope rendertrace3("Forward NoSwap Transaction", "000093"); if (!HasShadowManager() || !mShadowManager->IPCOpen() || - !mShadowManager->SendUpdateNoSwap(cset, targetConfig, mIsFirstPaint, + !mShadowManager->SendUpdateNoSwap(cset, aId, targetConfig, mIsFirstPaint, aPaintSequenceNumber, aScheduleComposite)) { MOZ_LAYERS_LOG(("[LayersForwarder] WARNING: sending transaction failed!")); return false; } diff --git a/gfx/layers/ipc/ShadowLayers.h b/gfx/layers/ipc/ShadowLayers.h index 9d68105652ac..09d92b14dde8 100644 --- a/gfx/layers/ipc/ShadowLayers.h +++ b/gfx/layers/ipc/ShadowLayers.h @@ -295,6 +295,7 @@ public: */ bool EndTransaction(InfallibleTArray<EditReply>* aReplies, const nsIntRegion& aRegionToClear, + uint64_t aId, bool aScheduleComposite, uint32_t aPaintSequenceNumber, bool* aSent); diff --git a/gfx/layers/ipc/ShadowLayersManager.h b/gfx/layers/ipc/ShadowLayersManager.h index 02fa130966e3..1ca7dd1f46f5 100644
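Both update messages now carry the id, so the forwarder can thread it through whichever one it picks. A condensed sketch of that choice, simplified from the EndTransaction body above (swapRequired is a hypothetical stand-in for the real changeset inspection, and the argument order follows the PLayerTransaction declarations above):

  bool swapRequired = TransactionNeedsSwap(cset);  // hypothetical predicate
  if (swapRequired) {
    // Sync: block until the parent returns the edit replies.
    mShadowManager->SendUpdate(cset, aId, targetConfig, mIsFirstPaint,
                               aScheduleComposite, aPaintSequenceNumber, aReplies);
  } else {
    // Async: nothing needs to be swapped back, so don't block.
    mShadowManager->SendUpdateNoSwap(cset, aId, targetConfig, mIsFirstPaint,
                                     aScheduleComposite, aPaintSequenceNumber);
  }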
--- a/gfx/layers/ipc/ShadowLayersManager.h +++ b/gfx/layers/ipc/ShadowLayersManager.h @@ -19,6 +19,7 @@ class ShadowLayersManager { public: virtual void ShadowLayersUpdated(LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, diff --git a/gfx/layers/moz.build b/gfx/layers/moz.build index a8ff78c1ba0c..eb7123dea675 100644 --- a/gfx/layers/moz.build +++ b/gfx/layers/moz.build @@ -163,6 +163,7 @@ EXPORTS.mozilla.layers += [ 'opengl/TextureClientOGL.h', 'opengl/TextureHostOGL.h', 'RenderTrace.h', + 'TransactionIdAllocator.h', 'YCbCrImageDataSerializer.h', ] diff --git a/gfx/src/nsCoord.h b/gfx/src/nsCoord.h index 1eb47b5a4355..ea4668347d3b 100644 --- a/gfx/src/nsCoord.h +++ b/gfx/src/nsCoord.h @@ -268,6 +268,52 @@ inline nscoord NSToCoordCeilClamped(double aValue) return NSToCoordCeil(aValue); } +// The NSToCoordTrunc* functions remove the fractional component of +// aValue, and are thus equivalent to NSToCoordFloor* for positive +// values and NSToCoordCeil* for negative values. + +inline nscoord NSToCoordTrunc(float aValue) +{ + // There's no need to use truncf() since it matches the default + // rules for float to integer conversion. + return nscoord(aValue); +} + +inline nscoord NSToCoordTrunc(double aValue) +{ + // There's no need to use trunc() since it matches the default + // rules for float to integer conversion. + return nscoord(aValue); +} + +inline nscoord NSToCoordTruncClamped(float aValue) +{ +#ifndef NS_COORD_IS_FLOAT + // Bounds-check before converting out of float, to avoid overflow + if (aValue >= nscoord_MAX) { + return nscoord_MAX; + } + if (aValue <= nscoord_MIN) { + return nscoord_MIN; + } +#endif + return NSToCoordTrunc(aValue); +} + +inline nscoord NSToCoordTruncClamped(double aValue) +{ +#ifndef NS_COORD_IS_FLOAT + // Bounds-check before converting out of double, to avoid overflow + if (aValue >= nscoord_MAX) { + return nscoord_MAX; + } + if (aValue <= nscoord_MIN) { + return nscoord_MIN; + } +#endif + return NSToCoordTrunc(aValue); +} + /* * Int Rounding Functions */ diff --git a/hal/gonk/GonkHal.cpp b/hal/gonk/GonkHal.cpp index d3f062c8be89..c44516afc917 100644 --- a/hal/gonk/GonkHal.cpp +++ b/hal/gonk/GonkHal.cpp @@ -66,6 +66,7 @@ #include "nsXULAppAPI.h" #include "OrientationObserver.h" #include "UeventPoller.h" +#include "nsIWritablePropertyBag2.h" #include #define LOG(args...) __android_log_print(ANDROID_LOG_INFO, "Gonk", args) @@ -290,6 +291,25 @@ public: hal_impl::SetLight(hal::eHalLightID_Battery, aConfig); hal::NotifyBatteryChange(info); + + { + // bug 975667 + // Gecko gonk hal is required to emit battery charging/level notification via nsIObserverService. 
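A quick numeric illustration of the equivalence stated in the NSToCoordTrunc comment above (assuming the default integer-nscoord configuration):

  NSToCoordTrunc(2.7f);          // 2, same as NSToCoordFloor(2.7f)
  NSToCoordTrunc(-2.7f);         // -2, same as NSToCoordCeil(-2.7f)
  NSToCoordTruncClamped(1e30f);  // nscoord_MAX: bounds-checked before converting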
+ // This is useful for XPCOM components that are not statically linked to Gecko and cannot call + // hal::EnableBatteryNotifications + nsCOMPtr<nsIObserverService> obsService = mozilla::services::GetObserverService(); + nsCOMPtr<nsIWritablePropertyBag2> propbag = + do_CreateInstance("@mozilla.org/hash-property-bag;1"); + if (obsService && propbag) { + propbag->SetPropertyAsBool(NS_LITERAL_STRING("charging"), + info.charging()); + propbag->SetPropertyAsDouble(NS_LITERAL_STRING("level"), + info.level()); + + obsService->NotifyObservers(propbag, "gonkhal-battery-notifier", nullptr); + } + } + return NS_OK; } }; diff --git a/intl/icu/Makefile.in b/intl/icu/Makefile.in index 9e90bc9beda1..fe92ea5f5d2c 100644 --- a/intl/icu/Makefile.in +++ b/intl/icu/Makefile.in @@ -9,15 +9,10 @@ ifdef ENABLE_INTL_API ifndef MOZ_NATIVE_ICU # Library names: On Windows, ICU uses modified library names for static # and debug libraries. - ifeq ($(OS_ARCH),WINNT) - ifdef MOZ_DEBUG - ICU_LIB_SUFFIX=d - endif - endif # WINNT ifdef MOZ_SHARED_ICU ifeq ($(OS_ARCH),WINNT) ifdef JS_SHARED_LIBRARY - ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(libname)$(ICU_LIB_SUFFIX)$(MOZ_ICU_VERSION).dll) + ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(libname)$(MOZ_ICU_DBG_SUFFIX)$(MOZ_ICU_VERSION).dll) endif else # ! WINNT ifeq ($(OS_ARCH),Darwin) @@ -39,7 +34,7 @@ ifdef ENABLE_INTL_API else # !MOZ_SHARED_ICU ifeq ($(OS_ARCH),WINNT) ICU_LIB_RENAME = $(foreach libname,$(ICU_LIB_NAMES),\ - cp -p $(DEPTH)/intl/icu/target/lib/s$(libname)$(ICU_LIB_SUFFIX).lib $(DEPTH)/intl/icu/target/lib/$(libname).lib;) + cp -p $(DEPTH)/intl/icu/target/lib/s$(libname)$(MOZ_ICU_DBG_SUFFIX).lib $(DEPTH)/intl/icu/target/lib/$(libname).lib;) endif endif # MOZ_SHARED_ICU endif # !MOZ_NATIVE_ICU diff --git a/ipc/chromium/src/base/basictypes.h b/ipc/chromium/src/base/basictypes.h index 13c60bb1e99a..d7499a1e77a7 100644 --- a/ipc/chromium/src/base/basictypes.h +++ b/ipc/chromium/src/base/basictypes.h @@ -169,13 +169,15 @@ inline To implicit_cast(From const &f) { // the expression is false, most compilers will issue a warning/error // containing the name of the variable. +// Avoid multiple definitions for webrtc +#if !defined(COMPILE_ASSERT) template <bool> struct CompileAssert { }; -#undef COMPILE_ASSERT #define COMPILE_ASSERT(expr, msg) \ typedef CompileAssert<(bool(expr))> msg[bool(expr) ?
1 : -1] +#endif // Implementation details of COMPILE_ASSERT: // diff --git a/js/src/assembler/assembler/X86Assembler.h b/js/src/assembler/assembler/X86Assembler.h index 8894b647a0f2..0d12c837dc5d 100644 --- a/js/src/assembler/assembler/X86Assembler.h +++ b/js/src/assembler/assembler/X86Assembler.h @@ -709,6 +709,21 @@ public: m_formatter.oneByteOp64(OP_AND_GvEv, dst, base, offset); } + void andq_mr(int offset, RegisterID base, RegisterID index, int scale, RegisterID dst) + { + spew("andq %s0x%x(%s,%s,%d), %s", + PRETTY_PRINT_OFFSET(offset), nameIReg(8,base), nameIReg(8,index), 1<<scale, nameIReg(8,dst)); + m_formatter.oneByteOp64(OP_AND_GvEv, dst, base, index, scale, offset); + } + + void andq_mr(const void* addr, RegisterID dst) + { + spew("andq %p, %s", addr, nameIReg(8,dst)); + m_formatter.oneByteOp64(OP_AND_GvEv, dst, addr); + } diff --git a/js/src/jit/BaselineDebugModeOSR.cpp b/js/src/jit/BaselineDebugModeOSR.cpp typedef Vector<DebugModeOSREntry> DebugModeOSREntryVector; +class UniqueScriptOSREntryIter +{ + const DebugModeOSREntryVector &entries_; + size_t index_; + + public: + UniqueScriptOSREntryIter(const DebugModeOSREntryVector &entries) + : entries_(entries), + index_(0) + { } + + bool done() { + return index_ == entries_.length(); + } + + const DebugModeOSREntry &entry() { + MOZ_ASSERT(!done()); + return entries_[index_]; + } + + UniqueScriptOSREntryIter &operator++() { + MOZ_ASSERT(!done()); + while (++index_ < entries_.length()) { + bool unique = true; + for (size_t i = 0; i < index_; i++) { + if (entries_[i].script == entries_[index_].script) { + unique = false; + break; + } + } + if (unique) + break; + } + return *this; + } +}; + static bool CollectOnStackScripts(JSContext *cx, const JitActivationIterator &activation, DebugModeOSREntryVector &entries) @@ -551,11 +588,12 @@ UndoRecompileBaselineScriptsForDebugMode(JSContext *cx, { // In case of failure, roll back the entire set of active scripts so that // we don't have to patch return addresses on the stack. - for (size_t i = 0; i < entries.length(); i++) { - JSScript *script = entries[i].script; + for (UniqueScriptOSREntryIter iter(entries); !iter.done(); ++iter) { + const DebugModeOSREntry &entry = iter.entry(); + JSScript *script = entry.script; BaselineScript *baselineScript = script->baselineScript(); - if (entries[i].recompiled()) { - script->setBaselineScript(cx, entries[i].oldBaselineScript); + if (entry.recompiled()) { + script->setBaselineScript(cx, entry.oldBaselineScript); BaselineScript::Destroy(cx->runtime()->defaultFreeOp(), baselineScript); } } @@ -602,9 +640,10 @@ jit::RecompileOnStackBaselineScriptsForDebugMode(JSContext *cx, JSCompartment *c // // After this point the function must be infallible.
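A usage sketch for UniqueScriptOSREntryIter above: the iterator rescans earlier entries on every step, so it is quadratic in the worst case, but it guarantees that per-script work runs once even when several on-stack frames share a script, which is exactly how the rollback and destroy loops in this file use it (process is a stand-in):

  for (UniqueScriptOSREntryIter iter(entries); !iter.done(); ++iter)
    process(iter.entry().script);   // reached at most once per JSScript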
- for (size_t i = 0; i < entries.length(); i++) { - if (entries[i].recompiled()) - BaselineScript::Destroy(cx->runtime()->defaultFreeOp(), entries[i].oldBaselineScript); + for (UniqueScriptOSREntryIter iter(entries); !iter.done(); ++iter) { + const DebugModeOSREntry &entry = iter.entry(); + if (entry.recompiled()) + BaselineScript::Destroy(cx->runtime()->defaultFreeOp(), entry.oldBaselineScript); } size_t processed = 0; diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp index d03cd93239c8..e8af37c71863 100644 --- a/js/src/jit/CodeGenerator.cpp +++ b/js/src/jit/CodeGenerator.cpp @@ -859,9 +859,9 @@ static const VMFunctionsModal PrimitiveToStringInfo = VMFunctionsModal( FunctionInfo(PrimitiveToStringPar)); bool -CodeGenerator::visitPrimitiveToString(LPrimitiveToString *lir) +CodeGenerator::visitValueToString(LValueToString *lir) { - ValueOperand input = ToValue(lir, LPrimitiveToString::Input); + ValueOperand input = ToValue(lir, LValueToString::Input); Register output = ToRegister(lir->output()); OutOfLineCode *ool = oolCallVM(PrimitiveToStringInfo, lir, (ArgList(), input), @@ -931,9 +931,18 @@ CodeGenerator::visitPrimitiveToString(LPrimitiveToString *lir) masm.bind(¬Boolean); } + // Object + if (lir->mir()->input()->mightBeType(MIRType_Object)) { + // Bail. + JS_ASSERT(lir->mir()->fallible()); + Label bail; + masm.branchTestObject(Assembler::Equal, tag, &bail); + if (!bailoutFrom(&bail, lir->snapshot())) + return false; + } + #ifdef DEBUG - // Objects are not supported or we see a type that wasn't accounted for. - masm.assumeUnreachable("Unexpected type for MPrimitiveToString."); + masm.assumeUnreachable("Unexpected type for MValueToString."); #endif masm.bind(&done); @@ -5833,7 +5842,7 @@ CodeGenerator::emitArrayPopShift(LInstruction *lir, const MArrayPopShift *mir, R } // VM call if a write barrier is necessary. - masm.branchTestNeedsBarrier(Assembler::NonZero, lengthTemp, ool->entry()); + masm.branchTestNeedsBarrier(Assembler::NonZero, ool->entry()); // Load elements and length. 
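visitValueToString above now tolerates object inputs by bailing out rather than asserting. A comment sketch of the contract, tying together the pieces this patch adds (fallible() appears in MIR.h below, the snapshot assignment in Lowering.cpp):

  // MToString::fallible() == input()->mightBeType(MIRType_Object)
  //   -> Lowering assigns a snapshot only in that case.
  //   -> CodeGen emits branchTestObject + bailoutFrom, so an actual object
  //      input deoptimizes to the interpreter, where ToString can run
  //      arbitrary (effectful) toString/valueOf code safely.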
masm.loadPtr(Address(obj, JSObject::offsetOfElements()), elementsTemp); @@ -6096,7 +6105,7 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir) masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry()); #else Label noBarrier; - masm.branchTestNeedsBarrier(Assembler::Zero, temp1, &noBarrier); + masm.branchTestNeedsBarrier(Assembler::Zero, &noBarrier); Address objAddr(niTemp, offsetof(NativeIterator, obj)); masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry()); diff --git a/js/src/jit/CodeGenerator.h b/js/src/jit/CodeGenerator.h index ca25c405b802..1211bfccaad2 100644 --- a/js/src/jit/CodeGenerator.h +++ b/js/src/jit/CodeGenerator.h @@ -93,7 +93,7 @@ class CodeGenerator : public CodeGeneratorSpecific void emitIntToString(Register input, Register output, Label *ool); bool visitIntToString(LIntToString *lir); bool visitDoubleToString(LDoubleToString *lir); - bool visitPrimitiveToString(LPrimitiveToString *lir); + bool visitValueToString(LValueToString *lir); bool visitInteger(LInteger *lir); bool visitRegExp(LRegExp *lir); bool visitRegExpExec(LRegExpExec *lir); diff --git a/js/src/jit/IonBuilder.h b/js/src/jit/IonBuilder.h index 2a7de06be90a..07f8df0bbc0b 100644 --- a/js/src/jit/IonBuilder.h +++ b/js/src/jit/IonBuilder.h @@ -736,6 +736,7 @@ class IonBuilder : public MIRGenerator InliningStatus inlineHaveSameClass(CallInfo &callInfo); InliningStatus inlineToObject(CallInfo &callInfo); InliningStatus inlineToInteger(CallInfo &callInfo); + InliningStatus inlineToString(CallInfo &callInfo); InliningStatus inlineDump(CallInfo &callInfo); InliningStatus inlineHasClass(CallInfo &callInfo, const Class *clasp) { return inlineHasClasses(callInfo, clasp, nullptr); diff --git a/js/src/jit/IonMacroAssembler.h b/js/src/jit/IonMacroAssembler.h index 350bf7516ad9..2ff1928d0bf5 100644 --- a/js/src/jit/IonMacroAssembler.h +++ b/js/src/jit/IonMacroAssembler.h @@ -645,11 +645,10 @@ class MacroAssembler : public MacroAssemblerSpecific branch32(cond, length, Imm32(key.constant()), label); } - void branchTestNeedsBarrier(Condition cond, Register scratch, Label *label) { + void branchTestNeedsBarrier(Condition cond, Label *label) { JS_ASSERT(cond == Zero || cond == NonZero); CompileZone *zone = GetIonContext()->compartment->zone(); - movePtr(ImmPtr(zone->addressOfNeedsBarrier()), scratch); - Address needsBarrierAddr(scratch, 0); + AbsoluteAddress needsBarrierAddr(zone->addressOfNeedsBarrier()); branchTest32(cond, needsBarrierAddr, Imm32(0x1), label); } diff --git a/js/src/jit/LIR-Common.h b/js/src/jit/LIR-Common.h index 7062e3d04a89..0a2f486aee39 100644 --- a/js/src/jit/LIR-Common.h +++ b/js/src/jit/LIR-Common.h @@ -3182,12 +3182,12 @@ class LDoubleToString : public LInstructionHelper<1, 1, 1> }; // Convert a primitive to a string with a function call. 
-class LPrimitiveToString : public LInstructionHelper<1, BOX_PIECES, 1> +class LValueToString : public LInstructionHelper<1, BOX_PIECES, 1> { public: - LIR_HEADER(PrimitiveToString) + LIR_HEADER(ValueToString) - explicit LPrimitiveToString(const LDefinition &tempToUnbox) + LValueToString(const LDefinition &tempToUnbox) { setTemp(0, tempToUnbox); } diff --git a/js/src/jit/LOpcodes.h b/js/src/jit/LOpcodes.h index 63ffe52a1c65..45c3757b6bec 100644 --- a/js/src/jit/LOpcodes.h +++ b/js/src/jit/LOpcodes.h @@ -147,7 +147,7 @@ _(BooleanToString) \ _(IntToString) \ _(DoubleToString) \ - _(PrimitiveToString) \ + _(ValueToString) \ _(Start) \ _(OsrEntry) \ _(OsrValue) \ diff --git a/js/src/jit/Lowering.cpp b/js/src/jit/Lowering.cpp index 154fbdfd59b1..48f6b270eff8 100644 --- a/js/src/jit/Lowering.cpp +++ b/js/src/jit/Lowering.cpp @@ -1915,10 +1915,14 @@ LIRGenerator::visitToString(MToString *ins) return assignSafepoint(lir, ins); } + case MIRType_String: + return redefine(ins, ins->input()); + case MIRType_Value: { - JS_ASSERT(!opd->mightBeType(MIRType_Object)); - LPrimitiveToString *lir = new(alloc()) LPrimitiveToString(tempToUnbox()); - if (!useBox(lir, LPrimitiveToString::Input, opd)) + LValueToString *lir = new(alloc()) LValueToString(tempToUnbox()); + if (!useBox(lir, LValueToString::Input, opd)) + return false; + if (ins->fallible() && !assignSnapshot(lir)) return false; if (!define(lir, ins)) return false; @@ -1926,7 +1930,7 @@ LIRGenerator::visitToString(MToString *ins) } default: - // Objects might be effectful. (see ToPrimitive) + // Float32 and objects are not supported. MOZ_ASSUME_UNREACHABLE("unexpected type"); } } diff --git a/js/src/jit/MCallOptimize.cpp b/js/src/jit/MCallOptimize.cpp index d5642a893b29..294024bac103 100644 --- a/js/src/jit/MCallOptimize.cpp +++ b/js/src/jit/MCallOptimize.cpp @@ -165,6 +165,8 @@ IonBuilder::inlineNativeCall(CallInfo &callInfo, JSFunction *target) return inlineToObject(callInfo); if (native == intrinsic_ToInteger) return inlineToInteger(callInfo); + if (native == intrinsic_ToString) + return inlineToString(callInfo); // TypedObject intrinsics. if (native == intrinsic_ObjectIsTypedObject) @@ -1914,6 +1916,22 @@ IonBuilder::inlineToInteger(CallInfo &callInfo) return InliningStatus_Inlined; } +IonBuilder::InliningStatus +IonBuilder::inlineToString(CallInfo &callInfo) +{ + if (callInfo.argc() != 1 || callInfo.constructing()) + return InliningStatus_NotInlined; + + if (getInlineReturnType() != MIRType_String) + return InliningStatus_NotInlined; + + callInfo.setImplicitlyUsedUnchecked(); + MToString *toString = MToString::New(alloc(), callInfo.getArg(0)); + current->add(toString); + current->push(toString); + return InliningStatus_Inlined; +} + IonBuilder::InliningStatus IonBuilder::inlineBailout(CallInfo &callInfo) { diff --git a/js/src/jit/MIR.h b/js/src/jit/MIR.h index 8b4716f8250d..e6b9ce65da8e 100644 --- a/js/src/jit/MIR.h +++ b/js/src/jit/MIR.h @@ -3362,17 +3362,13 @@ class MTruncateToInt32 : public MUnaryInstruction }; // Converts any type to a string -class MToString : public MUnaryInstruction +class MToString : + public MUnaryInstruction, + public ToStringPolicy { explicit MToString(MDefinition *def) : MUnaryInstruction(def) { - // Converting an object to a string might be effectful. 
- JS_ASSERT(!def->mightBeType(MIRType_Object)); - - // NOP - JS_ASSERT(def->type() != MIRType_String); - setResultType(MIRType_String); setMovable(); } @@ -3386,13 +3382,21 @@ class MToString : public MUnaryInstruction MDefinition *foldsTo(TempAllocator &alloc, bool useValueNumbers); + TypePolicy *typePolicy() { + return this; + } + bool congruentTo(const MDefinition *ins) const { return congruentIfOperandsEqual(ins); } + AliasSet getAliasSet() const { - JS_ASSERT(!input()->mightBeType(MIRType_Object)); return AliasSet::None(); } + + bool fallible() const { + return input()->mightBeType(MIRType_Object); + } }; class MBitNot diff --git a/js/src/jit/TypePolicy.cpp b/js/src/jit/TypePolicy.cpp index 92c050064eff..a4a402c793d9 100644 --- a/js/src/jit/TypePolicy.cpp +++ b/js/src/jit/TypePolicy.cpp @@ -416,21 +416,13 @@ ConvertToStringPolicy<Op>::staticAdjustInputs(TempAllocator &alloc, MInstruction if (in->type() == MIRType_String) return true; - MInstruction *replace; - if (in->mightBeType(MIRType_Object)) { - if (in->type() != MIRType_Value) - in = boxAt(alloc, ins, in); - - replace = MUnbox::New(alloc, in, MIRType_String, MUnbox::Fallible); - } else { - // TODO remove these two lines once 966957 has landed - EnsureOperandNotFloat32(alloc, ins, Op); - in = ins->getOperand(Op); - replace = MToString::New(alloc, in); - } - + MToString *replace = MToString::New(alloc, in); ins->block()->insertBefore(ins, replace); ins->replaceOperand(Op, replace); + + if (!ToStringPolicy::staticAdjustInputs(alloc, replace)) + return false; + return true; } @@ -618,6 +610,22 @@ ToInt32Policy::staticAdjustInputs(TempAllocator &alloc, MInstruction *ins) return true; } +bool +ToStringPolicy::staticAdjustInputs(TempAllocator &alloc, MInstruction *ins) +{ + JS_ASSERT(ins->isToString()); + + if (ins->getOperand(0)->type() == MIRType_Object) { + ins->replaceOperand(0, boxAt(alloc, ins, ins->getOperand(0))); + return true; + } + + // TODO remove the following line once 966957 has landed + EnsureOperandNotFloat32(alloc, ins, 0); + + return true; +} + template <unsigned Op> bool ObjectPolicy<Op>::staticAdjustInputs(TempAllocator &alloc, MInstruction *ins) diff --git a/js/src/jit/TypePolicy.h b/js/src/jit/TypePolicy.h index 6c7d16688b78..6c8341e368b4 100644 --- a/js/src/jit/TypePolicy.h +++ b/js/src/jit/TypePolicy.h @@ -120,7 +120,7 @@ class StringPolicy : public BoxInputsPolicy // Expect a string for operand Op. Else a ToString instruction is inserted. template <unsigned Op> -class ConvertToStringPolicy : public BoxInputsPolicy +class ConvertToStringPolicy : public TypePolicy { public: static bool staticAdjustInputs(TempAllocator &alloc, MInstruction *def); @@ -221,6 +221,16 @@ class ToInt32Policy : public BoxInputsPolicy } }; +// Box objects as input to a ToString instruction.
+class ToStringPolicy : public BoxInputsPolicy +{ + public: + static bool staticAdjustInputs(TempAllocator &alloc, MInstruction *def); + bool adjustInputs(TempAllocator &alloc, MInstruction *def) { + return staticAdjustInputs(alloc, def); + } +}; + template <unsigned Op> class ObjectPolicy : public BoxInputsPolicy { diff --git a/js/src/jit/arm/MacroAssembler-arm.h b/js/src/jit/arm/MacroAssembler-arm.h index d3efad1643c8..bebf075740c4 100644 --- a/js/src/jit/arm/MacroAssembler-arm.h +++ b/js/src/jit/arm/MacroAssembler-arm.h @@ -992,6 +992,10 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM ma_ldr(Operand(address.base, address.offset), ScratchRegister); branchTest32(cond, ScratchRegister, imm, label); } + void branchTest32(Condition cond, AbsoluteAddress address, Imm32 imm, Label *label) { + loadPtr(address, ScratchRegister); + branchTest32(cond, ScratchRegister, imm, label); + } void branchTestPtr(Condition cond, Register lhs, Register rhs, Label *label) { branchTest32(cond, lhs, rhs, label); } diff --git a/js/src/jit/shared/Assembler-x86-shared.h b/js/src/jit/shared/Assembler-x86-shared.h index 59c8df3edc0e..58c273689dd9 100644 --- a/js/src/jit/shared/Assembler-x86-shared.h +++ b/js/src/jit/shared/Assembler-x86-shared.h @@ -112,6 +112,16 @@ class Operand JS_ASSERT(kind() == MEM_ADDRESS32); return reinterpret_cast<const void*>(disp_); } + + bool containsReg(Register r) const { + switch (kind()) { + case REG: return r.code() == reg(); + case MEM_REG_DISP: return r.code() == base(); + case MEM_SCALE: return r.code() == base() || r.code() == index(); + default: MOZ_CRASH("Unexpected Operand kind"); + } + return false; + } }; class AssemblerX86Shared : public AssemblerShared @@ -885,6 +895,9 @@ class AssemblerX86Shared : public AssemblerShared case Operand::MEM_REG_DISP: masm.testl_i32m(rhs.value, lhs.disp(), lhs.base()); break; + case Operand::MEM_ADDRESS32: + masm.testl_i32m(rhs.value, lhs.address()); + break; default: MOZ_ASSUME_UNREACHABLE("unexpected operand kind"); break; diff --git a/js/src/jit/shared/Lowering-shared-inl.h b/js/src/jit/shared/Lowering-shared-inl.h index 59612193ab20..729397b74594 100644 --- a/js/src/jit/shared/Lowering-shared-inl.h +++ b/js/src/jit/shared/Lowering-shared-inl.h @@ -292,6 +292,14 @@ LIRGeneratorShared::useOrConstant(MDefinition *mir) return use(mir); } +LAllocation +LIRGeneratorShared::useOrConstantAtStart(MDefinition *mir) +{ + if (mir->isConstant()) + return LAllocation(mir->toConstant()->vp()); + return useAtStart(mir); +} + LAllocation LIRGeneratorShared::useRegisterOrConstant(MDefinition *mir) { diff --git a/js/src/jit/shared/Lowering-shared.h b/js/src/jit/shared/Lowering-shared.h index 26bd474c375f..1ea1b64090f0 100644 --- a/js/src/jit/shared/Lowering-shared.h +++ b/js/src/jit/shared/Lowering-shared.h @@ -76,6 +76,7 @@ class LIRGeneratorShared : public MInstructionVisitorWithDefaults inline LUse useFixed(MDefinition *mir, AnyRegister reg); inline LUse useFixedAtStart(MDefinition *mir, Register reg); inline LAllocation useOrConstant(MDefinition *mir); + inline LAllocation useOrConstantAtStart(MDefinition *mir); // "Any" is architecture dependent, and will include registers and stack slots on X86, // and only registers on ARM.
inline LAllocation useAny(MDefinition *mir); diff --git a/js/src/jit/shared/Lowering-x86-shared.cpp b/js/src/jit/shared/Lowering-x86-shared.cpp index 83ce02e9d139..14f12b772e8f 100644 --- a/js/src/jit/shared/Lowering-x86-shared.cpp +++ b/js/src/jit/shared/Lowering-x86-shared.cpp @@ -36,7 +36,7 @@ LIRGeneratorX86Shared::visitGuardShape(MGuardShape *ins) { JS_ASSERT(ins->obj()->type() == MIRType_Object); - LGuardShape *guard = new(alloc()) LGuardShape(useRegister(ins->obj())); + LGuardShape *guard = new(alloc()) LGuardShape(useRegisterAtStart(ins->obj())); if (!assignSnapshot(guard, ins->bailoutKind())) return false; if (!add(guard, ins)) @@ -49,7 +49,7 @@ LIRGeneratorX86Shared::visitGuardObjectType(MGuardObjectType *ins) { JS_ASSERT(ins->obj()->type() == MIRType_Object); - LGuardObjectType *guard = new(alloc()) LGuardObjectType(useRegister(ins->obj())); + LGuardObjectType *guard = new(alloc()) LGuardObjectType(useRegisterAtStart(ins->obj())); if (!assignSnapshot(guard)) return false; if (!add(guard, ins)) @@ -75,9 +75,9 @@ LIRGeneratorX86Shared::lowerForShift(LInstructionHelper<1, 2, 0> *ins, MDefiniti // shift operator should be constant or in register ecx // x86 can't shift a non-ecx register if (rhs->isConstant()) - ins->setOperand(1, useOrConstant(rhs)); + ins->setOperand(1, useOrConstantAtStart(rhs)); else - ins->setOperand(1, useFixed(rhs, ecx)); + ins->setOperand(1, lhs != rhs ? useFixed(rhs, ecx) : useFixedAtStart(rhs, ecx)); return defineReuseInput(ins, mir, 0); } @@ -95,7 +95,7 @@ LIRGeneratorX86Shared::lowerForALU(LInstructionHelper<1, 2, 0> *ins, MDefinition MDefinition *lhs, MDefinition *rhs) { ins->setOperand(0, useRegisterAtStart(lhs)); - ins->setOperand(1, useOrConstant(rhs)); + ins->setOperand(1, lhs != rhs ? useOrConstant(rhs) : useOrConstantAtStart(rhs)); return defineReuseInput(ins, mir, 0); } @@ -103,7 +103,7 @@ bool LIRGeneratorX86Shared::lowerForFPU(LInstructionHelper<1, 2, 0> *ins, MDefinition *mir, MDefinition *lhs, MDefinition *rhs) { ins->setOperand(0, useRegisterAtStart(lhs)); - ins->setOperand(1, use(rhs)); + ins->setOperand(1, lhs != rhs ? 
use(rhs) : useAtStart(rhs)); return defineReuseInput(ins, mir, 0); } diff --git a/js/src/jit/x64/Assembler-x64.h b/js/src/jit/x64/Assembler-x64.h index e6ffc72d6f03..6cd9239f6dab 100644 --- a/js/src/jit/x64/Assembler-x64.h +++ b/js/src/jit/x64/Assembler-x64.h @@ -404,6 +404,24 @@ class Assembler : public AssemblerX86Shared void andq(Imm32 imm, Register dest) { masm.andq_ir(imm.value, dest.code()); } + void andq(const Operand &src, Register dest) { + switch (src.kind()) { + case Operand::REG: + masm.andq_rr(src.reg(), dest.code()); + break; + case Operand::MEM_REG_DISP: + masm.andq_mr(src.disp(), src.base(), dest.code()); + break; + case Operand::MEM_SCALE: + masm.andq_mr(src.disp(), src.base(), src.index(), src.scale(), dest.code()); + break; + case Operand::MEM_ADDRESS32: + masm.andq_mr(src.address(), dest.code()); + break; + default: + MOZ_ASSUME_UNREACHABLE("unexpected operand kind"); + } + } void addq(Imm32 imm, Register dest) { masm.addq_ir(imm.value, dest.code()); diff --git a/js/src/jit/x64/MacroAssembler-x64.h b/js/src/jit/x64/MacroAssembler-x64.h index 98589bcd8f53..f1852891f8b6 100644 --- a/js/src/jit/x64/MacroAssembler-x64.h +++ b/js/src/jit/x64/MacroAssembler-x64.h @@ -78,6 +78,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared using MacroAssemblerX86Shared::Pop; using MacroAssemblerX86Shared::callWithExitFrame; using MacroAssemblerX86Shared::branch32; + using MacroAssemblerX86Shared::branchTest32; using MacroAssemblerX86Shared::load32; using MacroAssemblerX86Shared::store32; @@ -454,8 +455,12 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared void cmpPtr(Register lhs, const ImmWord rhs) { JS_ASSERT(lhs != ScratchReg); - mov(rhs, ScratchReg); - cmpq(lhs, ScratchReg); + if (intptr_t(rhs.value) <= INT32_MAX && intptr_t(rhs.value) >= INT32_MIN) { + cmpq(lhs, Imm32(int32_t(rhs.value))); + } else { + movq(rhs, ScratchReg); + cmpq(lhs, ScratchReg); + } } void cmpPtr(Register lhs, const ImmPtr rhs) { cmpPtr(lhs, ImmWord(uintptr_t(rhs.value))); @@ -469,6 +474,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared cmpq(lhs, rhs); } void cmpPtr(const Operand &lhs, const ImmGCPtr rhs) { + JS_ASSERT(!lhs.containsReg(ScratchReg)); movq(rhs, ScratchReg); cmpq(lhs, ScratchReg); } @@ -592,6 +598,15 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared branch32(cond, Address(ScratchReg, 0), rhs, label); } } + void branchTest32(Condition cond, AbsoluteAddress address, Imm32 imm, Label *label) { + if (JSC::X86Assembler::isAddressImmediate(address.addr)) { + testl(Operand(address), imm); + } else { + mov(ImmPtr(address.addr), ScratchReg); + testl(Operand(ScratchReg, 0), imm); + } + j(cond, label); + } // Specialization for AbsoluteAddress. void branchPtr(Condition cond, AbsoluteAddress addr, Register ptr, Label *label) { @@ -1072,9 +1087,14 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared void unboxNonDouble(const Operand &src, Register dest) { // Explicitly permits |dest| to be used in |src|. 
JS_ASSERT(dest != ScratchReg); - mov(ImmWord(JSVAL_PAYLOAD_MASK), ScratchReg); - movq(src, dest); - andq(ScratchReg, dest); + if (src.containsReg(dest)) { + mov(ImmWord(JSVAL_PAYLOAD_MASK), ScratchReg); + movq(src, dest); + andq(ScratchReg, dest); + } else { + mov(ImmWord(JSVAL_PAYLOAD_MASK), dest); + andq(src, dest); + } } void unboxString(const ValueOperand &src, Register dest) { unboxNonDouble(src, dest); } diff --git a/js/src/jit/x86/MacroAssembler-x86.h b/js/src/jit/x86/MacroAssembler-x86.h index ea2ac6ff3daf..6d5b7d9f7a46 100644 --- a/js/src/jit/x86/MacroAssembler-x86.h +++ b/js/src/jit/x86/MacroAssembler-x86.h @@ -69,6 +69,7 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared using MacroAssemblerX86Shared::Pop; using MacroAssemblerX86Shared::callWithExitFrame; using MacroAssemblerX86Shared::branch32; + using MacroAssemblerX86Shared::branchTest32; using MacroAssemblerX86Shared::load32; using MacroAssemblerX86Shared::store32; using MacroAssemblerX86Shared::call; @@ -593,6 +594,10 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared cmpl(Operand(lhs), rhs); j(cond, label); } + void branchTest32(Condition cond, AbsoluteAddress address, Imm32 imm, Label *label) { + testl(Operand(address), imm); + j(cond, label); + } // Specialization for AsmJSAbsoluteAddress. void branchPtr(Condition cond, AsmJSAbsoluteAddress lhs, Register ptr, Label *label) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index fb46efcb9637..37531a032629 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -1002,6 +1002,7 @@ class ContextAllocPolicy /* Exposed intrinsics so that Ion may inline them. */ bool intrinsic_ToObject(JSContext *cx, unsigned argc, Value *vp); bool intrinsic_ToInteger(JSContext *cx, unsigned argc, Value *vp); +bool intrinsic_ToString(JSContext *cx, unsigned argc, Value *vp); bool intrinsic_IsCallable(JSContext *cx, unsigned argc, Value *vp); bool intrinsic_ThrowError(JSContext *cx, unsigned argc, Value *vp); bool intrinsic_NewDenseArray(JSContext *cx, unsigned argc, Value *vp); diff --git a/js/src/vm/SelfHosting.cpp b/js/src/vm/SelfHosting.cpp index 3c76f0e451a9..638532c28f8c 100644 --- a/js/src/vm/SelfHosting.cpp +++ b/js/src/vm/SelfHosting.cpp @@ -73,13 +73,13 @@ js::intrinsic_ToInteger(JSContext *cx, unsigned argc, Value *vp) } bool -intrinsic_ToString(JSContext *cx, unsigned argc, Value *vp) +js::intrinsic_ToString(JSContext *cx, unsigned argc, Value *vp) { CallArgs args = CallArgsFromVp(argc, vp); RootedString str(cx); str = ToString(cx, args[0]); - if (!str) - return false; + if (!str) + return false; args.rval().setString(str); return true; } diff --git a/js/xpconnect/src/XPCJSRuntime.cpp b/js/xpconnect/src/XPCJSRuntime.cpp index e41294ca13c0..a6687048c5b6 100644 --- a/js/xpconnect/src/XPCJSRuntime.cpp +++ b/js/xpconnect/src/XPCJSRuntime.cpp @@ -3129,9 +3129,9 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect) #if defined(XP_MACOSX) || defined(DARWIN) // MacOS has a gargantuan default stack size of 8MB. Go wild with 7MB, - // and give trusted script 140k extra. The stack is huge on mac anyway. + // and give trusted script 180k extra. The stack is huge on mac anyway. const size_t kStackQuota = 7 * 1024 * 1024; - const size_t kTrustedScriptBuffer = 140 * 1024; + const size_t kTrustedScriptBuffer = 180 * 1024; #elif defined(MOZ_ASAN) // ASan requires more stack space due to red-zones, so give it double the // default (2MB on 32-bit, 4MB on 64-bit). 
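The x64 changes above share one theme: ScratchReg is the only register available for materializing 64-bit immediates, so it must not be clobbered while an Operand still refers to it. A compressed illustration of the resulting paths (pseudo-assembly in the comments, semantics as in the methods above):

  // cmpPtr: immediates that fit in 32 bits skip the scratch register.
  masm.cmpPtr(lhs, ImmWord(42));          // cmpq $42, lhs
  masm.cmpPtr(lhs, ImmWord(1ULL << 40));  // movq $imm, ScratchReg; cmpq lhs, ScratchReg

  // unboxNonDouble: when |src| doesn't mention |dest|, |dest| itself holds
  // the payload mask and ScratchReg stays free.
  masm.unboxNonDouble(Operand(rbx, 0), rax);  // movq $mask, rax; andq 0(rbx), rax
  masm.unboxNonDouble(Operand(rax, 0), rax);  // src aliases dest: use ScratchReg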
ASAN stack frame measurements diff --git a/layout/base/nsDisplayList.cpp b/layout/base/nsDisplayList.cpp index fdac791be3fc..9dfbc485a9fb 100644 --- a/layout/base/nsDisplayList.cpp +++ b/layout/base/nsDisplayList.cpp @@ -62,6 +62,7 @@ using namespace mozilla; using namespace mozilla::css; using namespace mozilla::layers; using namespace mozilla::dom; +using namespace mozilla::layout; typedef FrameMetrics::ViewID ViewID; static inline nsIFrame* @@ -1365,6 +1366,8 @@ void nsDisplayList::PaintForFrame(nsDisplayListBuilder* aBuilder, } } + MaybeSetupTransactionIdAllocator(layerManager, view); + layerManager->EndTransaction(FrameLayerBuilder::DrawThebesLayer, aBuilder, flags); aBuilder->SetIsCompositingCheap(temp); diff --git a/layout/base/nsLayoutUtils.cpp b/layout/base/nsLayoutUtils.cpp index 2a248e6e5954..fd786e03d15e 100644 --- a/layout/base/nsLayoutUtils.cpp +++ b/layout/base/nsLayoutUtils.cpp @@ -20,6 +20,7 @@ #include "nsCSSAnonBoxes.h" #include "nsCSSColorUtils.h" #include "nsView.h" +#include "nsViewManager.h" #include "nsPlaceholderFrame.h" #include "nsIScrollableFrame.h" #include "nsIDOMEvent.h" @@ -78,6 +79,7 @@ #include "UnitTransforms.h" #include "TiledLayerBuffer.h" // For TILEDLAYERBUFFER_TILE_SIZE #include "ClientLayerManager.h" +#include "nsRefreshDriver.h" #include "mozilla/Preferences.h" @@ -6635,3 +6637,19 @@ AutoMaybeDisableFontInflation::~AutoMaybeDisableFontInflation() mPresContext->mInflationDisabledForShrinkWrap = mOldValue; } } + +namespace mozilla { +namespace layout { + +void +MaybeSetupTransactionIdAllocator(layers::LayerManager* aManager, nsView* aView) +{ + if (aManager->GetBackendType() == layers::LayersBackend::LAYERS_CLIENT) { + layers::ClientLayerManager *manager = static_cast<layers::ClientLayerManager*>(aManager); + nsRefreshDriver *refresh = aView->GetViewManager()->GetPresShell()->GetPresContext()->RefreshDriver(); + manager->SetTransactionIdAllocator(refresh); + } +} + +} +} diff --git a/layout/base/nsLayoutUtils.h b/layout/base/nsLayoutUtils.h index b4534ccaf88d..149006166752 100644 --- a/layout/base/nsLayoutUtils.h +++ b/layout/base/nsLayoutUtils.h @@ -75,6 +75,7 @@ class HTMLVideoElement; } // namespace dom namespace layers { class Layer; +class ClientLayerManager; } } @@ -2310,6 +2311,8 @@ namespace mozilla { bool mOldValue; }; + void MaybeSetupTransactionIdAllocator(layers::LayerManager* aManager, nsView* aView); + } } diff --git a/layout/base/nsPresShell.cpp b/layout/base/nsPresShell.cpp index f43197649862..65b9a6155598 100644 --- a/layout/base/nsPresShell.cpp +++ b/layout/base/nsPresShell.cpp @@ -189,6 +189,7 @@ using namespace mozilla::dom; using namespace mozilla::gfx; using namespace mozilla::layers; using namespace mozilla::gfx; +using namespace mozilla::layout; CapturingContentInfo nsIPresShell::gCaptureInfo = { false /* mAllowed */, false /* mPointerLock */, false /* mRetargetToElement */, @@ -4037,12 +4038,15 @@ PresShell::FlushPendingNotifications(mozilla::ChangesToFlush aFlush) } if (aFlush.mFlushAnimations && - nsLayoutUtils::AreAsyncAnimationsEnabled() && !mPresContext->StyleUpdateForAllAnimationsIsUpToDate()) { - mPresContext->AnimationManager()-> - FlushAnimations(CommonAnimationManager::Cannot_Throttle); - mPresContext->TransitionManager()-> - FlushTransitions(CommonAnimationManager::Cannot_Throttle); + if (mPresContext->AnimationManager()) { + mPresContext->AnimationManager()-> + FlushAnimations(CommonAnimationManager::Cannot_Throttle); + } + if (mPresContext->TransitionManager()) { + mPresContext->TransitionManager()->
FlushTransitions(CommonAnimationManager::Cannot_Throttle); + } mPresContext->TickLastStyleUpdateForAllAnimations(); } @@ -5871,6 +5875,8 @@ PresShell::Paint(nsView* aViewToPaint, LayerProperties::CloneFrom(layerManager->GetRoot()) : nullptr); + MaybeSetupTransactionIdAllocator(layerManager, aViewToPaint); + if (layerManager->EndEmptyTransaction((aFlags & PAINT_COMPOSITE) ? LayerManager::END_DEFAULT : LayerManager::END_NO_COMPOSITE)) { nsIntRegion invalid; @@ -5931,6 +5937,7 @@ PresShell::Paint(nsView* aViewToPaint, root->SetVisibleRegion(bounds); layerManager->SetRoot(root); } + MaybeSetupTransactionIdAllocator(layerManager, aViewToPaint); layerManager->EndTransaction(nullptr, nullptr, (aFlags & PAINT_COMPOSITE) ? LayerManager::END_DEFAULT : LayerManager::END_NO_COMPOSITE); } diff --git a/layout/base/nsRefreshDriver.cpp b/layout/base/nsRefreshDriver.cpp index 3c1cf84efd90..e4a8db9f956a 100644 --- a/layout/base/nsRefreshDriver.cpp +++ b/layout/base/nsRefreshDriver.cpp @@ -686,12 +686,17 @@ nsRefreshDriver::nsRefreshDriver(nsPresContext* aPresContext) mReflowCause(nullptr), mStyleCause(nullptr), mPresContext(aPresContext), + mRootRefresh(nullptr), + mPendingTransaction(0), + mCompletedTransaction(0), mFreezeCount(0), mThrottled(false), mTestControllingRefreshes(false), mViewManagerFlushIsPending(false), mRequestedHighPrecision(false), - mInRefresh(false) + mInRefresh(false), + mWaitingForTransaction(false), + mSkippedPaints(0) { mMostRecentRefreshEpochTime = JS_Now(); mMostRecentRefresh = TimeStamp::Now(); @@ -703,6 +708,10 @@ nsRefreshDriver::~nsRefreshDriver() "observers should have unregistered"); NS_ABORT_IF_FALSE(!mActiveTimer, "timer should be gone"); + if (mRootRefresh) { + mRootRefresh->RemoveRefreshObserver(this, Flush_Style); + mRootRefresh = nullptr; + } for (uint32_t i = 0; i < mPresShellsToInvalidateIfHidden.Length(); i++) { mPresShellsToInvalidateIfHidden[i]->InvalidatePresShellIfHidden(); } @@ -725,6 +734,11 @@ nsRefreshDriver::AdvanceTimeAndRefresh(int64_t aMilliseconds) mMostRecentRefresh = TimeStamp::Now(); mTestControllingRefreshes = true; + if (mWaitingForTransaction) { + // Disable any refresh driver throttling when entering test mode + mWaitingForTransaction = false; + mSkippedPaints = 0; + } } mMostRecentRefreshEpochTime += aMilliseconds * 1000; @@ -739,6 +753,7 @@ nsRefreshDriver::RestoreNormalRefresh() { mTestControllingRefreshes = false; EnsureTimerStarted(false); + mCompletedTransaction = mPendingTransaction; } TimeStamp @@ -1001,12 +1016,6 @@ nsRefreshDriver::ArrayFor(mozFlushType aFlushType) } } -/* - * nsISupports implementation - */ - -NS_IMPL_ISUPPORTS(nsRefreshDriver, nsISupports) - /* * nsITimerCallback implementation */ @@ -1061,6 +1070,18 @@ nsRefreshDriver::Tick(int64_t aNowEpoch, TimeStamp aNowTime) mMostRecentRefresh = aNowTime; mMostRecentRefreshEpochTime = aNowEpoch; + if (IsWaitingForPaint()) { + // We're currently suspended waiting for earlier Ticks to + // be completed (on the Compositor). Mark that we missed the paint + // and keep waiting. + return; + } + if (mRootRefresh) { + mRootRefresh->RemoveRefreshObserver(this, Flush_Style); + mRootRefresh = nullptr; + } + mSkippedPaints = 0; + nsCOMPtr<nsIPresShell> presShell = mPresContext->GetPresShell(); if (!presShell || (ObserverCount() == 0 && ImageRequestCount() == 0)) { // Things are being destroyed, or we no longer have any observers. @@ -1096,6 +1117,8 @@ nsRefreshDriver::Tick(int64_t aNowEpoch, TimeStamp aNowTime) } if (i == 0) { + // This is the Flush_Style case.
+ // Grab all of our frame request callbacks up front. nsTArray frameRequestCallbacks(mFrameRequestCallbackDocs.Length()); @@ -1137,7 +1160,6 @@ nsRefreshDriver::Tick(int64_t aNowEpoch, TimeStamp aNowTime) } profiler_tracing("Paint", "Scripts", TRACING_INTERVAL_END); - // This is the Flush_Style case. if (mPresContext && mPresContext->GetPresShell()) { bool tracingStyleFlush = false; nsAutoTArray observers; @@ -1167,6 +1189,10 @@ nsRefreshDriver::Tick(int64_t aNowEpoch, TimeStamp aNowTime) profiler_tracing("Paint", "Styles", TRACING_INTERVAL_END); } } + + if (!nsLayoutUtils::AreAsyncAnimationsEnabled()) { + mPresContext->TickLastStyleUpdateForAllAnimations(); + } } else if (i == 1) { // This is the Flush_Layout case. if (mPresContext && mPresContext->GetPresShell()) { @@ -1355,6 +1381,114 @@ nsRefreshDriver::Thaw() } } +void +nsRefreshDriver::FinishedWaitingForTransaction() +{ + mWaitingForTransaction = false; + if (mSkippedPaints && + !IsInRefresh() && + (ObserverCount() || ImageRequestCount())) { + DoRefresh(); + } + mSkippedPaints = 0; +} + +uint64_t +nsRefreshDriver::GetTransactionId() +{ + ++mPendingTransaction; + + if (mPendingTransaction == mCompletedTransaction + 2 && + !mWaitingForTransaction && + !mTestControllingRefreshes) { + mWaitingForTransaction = true; + mSkippedPaints = 0; + } + + return mPendingTransaction; +} + +void +nsRefreshDriver::RevokeTransactionId(uint64_t aTransactionId) +{ + MOZ_ASSERT(aTransactionId == mPendingTransaction); + if (mPendingTransaction == mCompletedTransaction + 2 && + mWaitingForTransaction) { + MOZ_ASSERT(!mSkippedPaints, "How did we skip a paint when we're in the middle of one?"); + FinishedWaitingForTransaction(); + } + mPendingTransaction--; +} + +void +nsRefreshDriver::NotifyTransactionCompleted(uint64_t aTransactionId) +{ + if (aTransactionId > mCompletedTransaction) { + if (mPendingTransaction > mCompletedTransaction + 1 && + mWaitingForTransaction) { + mCompletedTransaction = aTransactionId; + FinishedWaitingForTransaction(); + } else { + mCompletedTransaction = aTransactionId; + } + } +} + +void +nsRefreshDriver::WillRefresh(mozilla::TimeStamp aTime) +{ + mRootRefresh->RemoveRefreshObserver(this, Flush_Style); + mRootRefresh = nullptr; + if (mSkippedPaints) { + DoRefresh(); + } +} + +bool +nsRefreshDriver::IsWaitingForPaint() +{ + if (mTestControllingRefreshes) { + return false; + } + // If we've skipped too many ticks then it's possible + // that something went wrong and we're waiting on + // a notification that will never arrive. + static const uint32_t kMaxSkippedPaints = 10; + if (mSkippedPaints > kMaxSkippedPaints) { + mSkippedPaints = 0; + mWaitingForTransaction = false; + if (mRootRefresh) { + mRootRefresh->RemoveRefreshObserver(this, Flush_Style); + } + return false; + } + if (mWaitingForTransaction) { + mSkippedPaints++; + return true; + } + + // Try to find the 'root' refresh driver for the current window and check + // if that is waiting for a paint.
+ nsPresContext *displayRoot = PresContext()->GetDisplayRootPresContext(); + if (displayRoot) { + nsRefreshDriver *rootRefresh = displayRoot->GetRootPresContext()->RefreshDriver(); + if (rootRefresh && rootRefresh != this) { + if (rootRefresh->IsWaitingForPaint()) { + if (mRootRefresh != rootRefresh) { + if (mRootRefresh) { + mRootRefresh->RemoveRefreshObserver(this, Flush_Style); + } + rootRefresh->AddRefreshObserver(this, Flush_Style); + mRootRefresh = rootRefresh; + } + mSkippedPaints++; + return true; + } + } + } + return false; +} + void nsRefreshDriver::SetThrottled(bool aThrottled) { diff --git a/layout/base/nsRefreshDriver.h b/layout/base/nsRefreshDriver.h index 887b1ece11eb..5c51df7b7e2e 100644 --- a/layout/base/nsRefreshDriver.h +++ b/layout/base/nsRefreshDriver.h @@ -22,6 +22,7 @@ #include "mozilla/Attributes.h" #include "mozilla/Maybe.h" #include "GeckoProfiler.h" +#include "mozilla/layers/TransactionIdAllocator.h" class nsPresContext; class nsIPresShell; @@ -62,7 +63,8 @@ public: virtual void DidRefresh() = 0; }; -class nsRefreshDriver MOZ_FINAL : public nsISupports { +class nsRefreshDriver MOZ_FINAL : public mozilla::layers::TransactionIdAllocator, + public nsARefreshObserver { public: nsRefreshDriver(nsPresContext *aPresContext); ~nsRefreshDriver(); @@ -70,9 +72,6 @@ public: static void InitializeStatics(); static void Shutdown(); - // nsISupports implementation - NS_DECL_ISUPPORTS - /** * Methods for testing, exposed via nsIDOMWindowUtils. See * nsIDOMWindowUtils.advanceTimeAndRefresh for description. @@ -272,6 +271,17 @@ public: bool IsInRefresh() { return mInRefresh; } + // mozilla::layers::TransactionIdAllocator + virtual uint64_t GetTransactionId() MOZ_OVERRIDE; + void NotifyTransactionCompleted(uint64_t aTransactionId) MOZ_OVERRIDE; + void RevokeTransactionId(uint64_t aTransactionId) MOZ_OVERRIDE; + + bool IsWaitingForPaint(); + + // nsARefreshObserver + NS_IMETHOD_(MozExternalRefCountType) AddRef(void) { return TransactionIdAllocator::AddRef(); } + NS_IMETHOD_(MozExternalRefCountType) Release(void) { return TransactionIdAllocator::Release(); } + virtual void WillRefresh(mozilla::TimeStamp aTime); private: typedef nsTObserverArray ObserverArray; typedef nsTHashtable RequestTable; @@ -314,6 +324,8 @@ private: return mFrameRequestCallbackDocs.Length() != 0; } + void FinishedWaitingForTransaction(); + mozilla::RefreshDriverTimer* ChooseTimer() const; mozilla::RefreshDriverTimer *mActiveTimer; @@ -323,6 +335,13 @@ private: nsPresContext *mPresContext; // weak; pres context passed in constructor // and unset in Disconnect + nsRefPtr<nsRefreshDriver> mRootRefresh; + + // The most recently allocated transaction id. + uint64_t mPendingTransaction; + // The most recently completed transaction id. + uint64_t mCompletedTransaction; + uint32_t mFreezeCount; bool mThrottled; bool mTestControllingRefreshes; @@ -330,6 +349,14 @@ private: bool mRequestedHighPrecision; bool mInRefresh; + // True if the refresh driver is suspended waiting for transaction + // ids to be returned and shouldn't do any work during Tick(). + bool mWaitingForTransaction; + // Number of Ticks that have been skipped because of mWaitingForTransaction; + // when we resume we schedule a new Tick immediately instead of waiting + // until the next interval.
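Taken together with the mSkippedPaints counter declared just below, these additions give the refresh driver a small back-pressure state machine: ticks are skipped while the compositor is two transactions behind, and painting resumes when a transaction completes or, as a safety valve, after ten skipped paints in case the completion notification was lost. A compilable model of just that bookkeeping, with illustrative names rather than the real nsRefreshDriver API:

#include <cstdint>

// Minimal sketch of the throttling scheme above; names are illustrative.
struct TransactionThrottle {
  uint64_t mPending = 0;    // most recently allocated transaction id
  uint64_t mCompleted = 0;  // most recently completed transaction id
  bool mWaiting = false;    // stalled waiting for the compositor
  uint32_t mSkipped = 0;    // ticks skipped while stalled

  uint64_t AllocateId() {
    if (++mPending == mCompleted + 2) { // two transactions in flight: stall
      mWaiting = true;
      mSkipped = 0;
    }
    return mPending;
  }

  void TransactionCompleted(uint64_t aId) {
    if (aId > mCompleted) {
      mCompleted = aId;
      mWaiting = false; // resume ticking
    }
  }

  bool ShouldSkipTick() {
    const uint32_t kMaxSkippedPaints = 10;
    if (mSkipped > kMaxSkippedPaints) { // assume the notification was lost
      mSkipped = 0;
      mWaiting = false;
      return false;
    }
    if (mWaiting) {
      ++mSkipped;
      return true;
    }
    return false;
  }
};

int main() {
  TransactionThrottle t;
  t.AllocateId();                    // id 1: compositor can keep up
  t.AllocateId();                    // id 2: two ahead, start stalling
  bool skipped = t.ShouldSkipTick(); // true: this paint is skipped
  t.TransactionCompleted(1);         // compositor caught up
  return (skipped && !t.ShouldSkipTick()) ? 0 : 1;
}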
+ uint32_t mSkippedPaints; + int64_t mMostRecentRefreshEpochTime; mozilla::TimeStamp mMostRecentRefresh; diff --git a/layout/build/nsLayoutModule.cpp b/layout/build/nsLayoutModule.cpp index fb9aab5ff7a1..74e25f637637 100644 --- a/layout/build/nsLayoutModule.cpp +++ b/layout/build/nsLayoutModule.cpp @@ -229,6 +229,8 @@ static void Shutdown(); #include "AudioChannelService.h" +#include "mozilla/dom/DataStoreService.h" + #include "mozilla/dom/power/PowerManagerService.h" #include "mozilla/dom/alarm/AlarmHalService.h" #include "mozilla/dom/time/TimeService.h" @@ -584,10 +586,15 @@ NS_GENERIC_FACTORY_CONSTRUCTOR_INIT(Geolocation, Init) #define NS_AUDIOCHANNEL_SERVICE_CID \ { 0xf712e983, 0x048a, 0x443f, { 0x88, 0x02, 0xfc, 0xc3, 0xd9, 0x27, 0xce, 0xac }} +#define NS_DATASTORE_SERVICE_CID \ + { 0x0d4285fe, 0xf1b3, 0x49fa, { 0xbc, 0x51, 0xa4, 0xa8, 0x3f, 0x0a, 0xaf, 0x85 }} + NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(nsGeolocationService, nsGeolocationService::GetGeolocationService) NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(AudioChannelService, AudioChannelService::GetAudioChannelService) +NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(DataStoreService, DataStoreService::GetOrCreate) + #ifdef MOZ_WEBSPEECH NS_GENERIC_FACTORY_CONSTRUCTOR(FakeSpeechRecognitionService) #endif @@ -736,6 +743,7 @@ NS_DEFINE_NAMED_CID(NS_TEXTSERVICESDOCUMENT_CID); NS_DEFINE_NAMED_CID(NS_GEOLOCATION_SERVICE_CID); NS_DEFINE_NAMED_CID(NS_GEOLOCATION_CID); NS_DEFINE_NAMED_CID(NS_AUDIOCHANNEL_SERVICE_CID); +NS_DEFINE_NAMED_CID(NS_DATASTORE_SERVICE_CID); NS_DEFINE_NAMED_CID(NS_FOCUSMANAGER_CID); NS_DEFINE_NAMED_CID(CSPSERVICE_CID); NS_DEFINE_NAMED_CID(NS_CSPCONTEXT_CID); @@ -1023,6 +1031,7 @@ static const mozilla::Module::CIDEntry kLayoutCIDs[] = { { &kNS_GEOLOCATION_SERVICE_CID, false, nullptr, nsGeolocationServiceConstructor }, { &kNS_GEOLOCATION_CID, false, nullptr, GeolocationConstructor }, { &kNS_AUDIOCHANNEL_SERVICE_CID, false, nullptr, AudioChannelServiceConstructor }, + { &kNS_DATASTORE_SERVICE_CID, false, nullptr, DataStoreServiceConstructor }, { &kNS_FOCUSMANAGER_CID, false, nullptr, CreateFocusManager }, #ifdef MOZ_WEBSPEECH { &kNS_FAKE_SPEECH_RECOGNITION_SERVICE_CID, false, nullptr, FakeSpeechRecognitionServiceConstructor }, @@ -1177,6 +1186,7 @@ static const mozilla::Module::ContractIDEntry kLayoutContracts[] = { { "@mozilla.org/geolocation/service;1", &kNS_GEOLOCATION_SERVICE_CID }, { "@mozilla.org/geolocation;1", &kNS_GEOLOCATION_CID }, { "@mozilla.org/audiochannel/service;1", &kNS_AUDIOCHANNEL_SERVICE_CID }, + { "@mozilla.org/datastore-service;1", &kNS_DATASTORE_SERVICE_CID }, { "@mozilla.org/focus-manager;1", &kNS_FOCUSMANAGER_CID }, #ifdef MOZ_WEBSPEECH { NS_SPEECH_RECOGNITION_SERVICE_CONTRACTID_PREFIX "fake", &kNS_FAKE_SPEECH_RECOGNITION_SERVICE_CID }, diff --git a/layout/build/nsLayoutStatics.cpp b/layout/build/nsLayoutStatics.cpp index 892c92278555..e1b5e7f53ce6 100644 --- a/layout/build/nsLayoutStatics.cpp +++ b/layout/build/nsLayoutStatics.cpp @@ -65,6 +65,7 @@ #include "ActiveLayerTracker.h" #include "AudioChannelService.h" +#include "mozilla/dom/DataStoreService.h" #ifdef MOZ_XUL #include "nsXULPopupManager.h" @@ -411,6 +412,8 @@ nsLayoutStatics::Shutdown() AudioChannelService::Shutdown(); + DataStoreService::Shutdown(); + ContentParent::ShutDown(); nsRefreshDriver::Shutdown(); diff --git a/layout/generic/nsBlockFrame.cpp b/layout/generic/nsBlockFrame.cpp index db20087b6a37..48c94927d454 100644 --- a/layout/generic/nsBlockFrame.cpp +++ b/layout/generic/nsBlockFrame.cpp @@ -6545,12 +6545,15 @@ 
nsBlockFrame::GetBulletText(nsAString& aText) const if (myList->GetListStyleImage() || myList->mListStyleType == NS_STYLE_LIST_STYLE_DISC) { aText.Assign(kDiscCharacter); + aText.Append(' '); } else if (myList->mListStyleType == NS_STYLE_LIST_STYLE_CIRCLE) { aText.Assign(kCircleCharacter); + aText.Append(' '); } else if (myList->mListStyleType == NS_STYLE_LIST_STYLE_SQUARE) { aText.Assign(kSquareCharacter); + aText.Append(' '); } else if (myList->mListStyleType != NS_STYLE_LIST_STYLE_NONE) { nsBulletFrame* bullet = GetBullet(); diff --git a/layout/generic/nsBulletFrame.cpp b/layout/generic/nsBulletFrame.cpp index f66bbeca9d53..08a58a74f4d3 100644 --- a/layout/generic/nsBulletFrame.cpp +++ b/layout/generic/nsBulletFrame.cpp @@ -19,6 +19,7 @@ #include "prprf.h" #include "nsDisplayList.h" #include "nsCounterManager.h" +#include "nsBidiUtils.h" #include "imgIContainer.h" #include "imgRequestProxy.h" @@ -312,8 +313,6 @@ nsBulletFrame::PaintBullet(nsRenderingContext& aRenderingContext, nsPoint aPt, nsRefPtr fm; aRenderingContext.SetColor(nsLayoutUtils::GetColor(this, eCSSProperty_color)); - mTextIsRTL = false; - nsAutoString text; switch (listStyleType) { case NS_STYLE_LIST_STYLE_NONE: @@ -415,12 +414,15 @@ nsBulletFrame::PaintBullet(nsRenderingContext& aRenderingContext, nsPoint aPt, GetListItemText(*myList, text); aRenderingContext.SetFont(fm); nscoord ascent = fm->MaxAscent(); - aRenderingContext.SetTextRunRTL(mTextIsRTL); - aRenderingContext.DrawString( - text, mPadding.left + aPt.x, - NSToCoordRound(nsLayoutUtils::GetSnappedBaselineY( - this, aRenderingContext.ThebesContext(), - mPadding.top + aPt.y, ascent))); + aPt.MoveBy(mPadding.left, mPadding.top); + aPt.y = NSToCoordRound(nsLayoutUtils::GetSnappedBaselineY( + this, aRenderingContext.ThebesContext(), aPt.y, ascent)); + nsPresContext* presContext = PresContext(); + if (!presContext->BidiEnabled() && HasRTLChars(text)) { + presContext->SetBidiEnabled(); + } + nsLayoutUtils::DrawString(this, &aRenderingContext, + text.get(), text.Length(), aPt); break; } } @@ -1454,18 +1456,16 @@ nsBulletFrame::AppendCounterText(int32_t aListStyleType, /* static */ void nsBulletFrame::GetListItemSuffix(int32_t aListStyleType, - nsString& aResult, - bool& aSuppressPadding) + nsString& aResult) { - aResult = '.'; - aSuppressPadding = false; + aResult.AssignLiteral(MOZ_UTF16(". 
")); switch (aListStyleType) { case NS_STYLE_LIST_STYLE_NONE: // used by counters code only case NS_STYLE_LIST_STYLE_DISC: // used by counters code only case NS_STYLE_LIST_STYLE_CIRCLE: // used by counters code only case NS_STYLE_LIST_STYLE_SQUARE: // used by counters code only - aResult.Truncate(); + aResult = ' '; break; case NS_STYLE_LIST_STYLE_CJK_DECIMAL: @@ -1485,7 +1485,6 @@ nsBulletFrame::GetListItemSuffix(int32_t aListStyleType, case NS_STYLE_LIST_STYLE_MOZ_CJK_HEAVENLY_STEM: case NS_STYLE_LIST_STYLE_MOZ_CJK_EARTHLY_BRANCH: aResult = 0x3001; - aSuppressPadding = true; break; case NS_STYLE_LIST_STYLE_KOREAN_HANGUL_FORMAL: @@ -1493,7 +1492,7 @@ nsBulletFrame::GetListItemSuffix(int32_t aListStyleType, case NS_STYLE_LIST_STYLE_KOREAN_HANJA_FORMAL: case NS_STYLE_LIST_STYLE_MOZ_HANGUL: case NS_STYLE_LIST_STYLE_MOZ_HANGUL_CONSONANT: - aResult = ','; + aResult.AssignLiteral(MOZ_UTF16(", ")); break; } } @@ -1502,27 +1501,30 @@ void nsBulletFrame::GetListItemText(const nsStyleList& aListStyle, nsString& result) { - const nsStyleVisibility* vis = StyleVisibility(); - NS_ASSERTION(aListStyle.mListStyleType != NS_STYLE_LIST_STYLE_NONE && aListStyle.mListStyleType != NS_STYLE_LIST_STYLE_DISC && aListStyle.mListStyleType != NS_STYLE_LIST_STYLE_CIRCLE && aListStyle.mListStyleType != NS_STYLE_LIST_STYLE_SQUARE, "we should be using specialized code for these types"); - result.Truncate(); - AppendCounterText(aListStyle.mListStyleType, mOrdinal, result, mTextIsRTL); + bool isRTL; + nsAutoString number; + AppendCounterText(aListStyle.mListStyleType, mOrdinal, number, isRTL); nsAutoString suffix; - GetListItemSuffix(aListStyle.mListStyleType, suffix, mSuppressPadding); + GetListItemSuffix(aListStyle.mListStyleType, suffix); - // We're not going to do proper Bidi reordering on the list item marker, but - // just display the whole thing as RTL or LTR, so we fake reordering by - // appending the suffix to the end of the list item marker if the - // directionality of the characters is the same as the style direction or - // prepending it to the beginning if they are different. - result = (mTextIsRTL == (vis->mDirection == NS_STYLE_DIRECTION_RTL)) ? - result + suffix : suffix + result; + result.Truncate(); + if (GetWritingMode().IsBidiLTR() != isRTL) { + result.Append(number); + } else { + // RLM = 0x200f, LRM = 0x200e + char16_t mark = isRTL ? 
0x200f : 0x200e; + result.Append(mark); + result.Append(number); + result.Append(mark); + } + result.Append(suffix); } #define MIN_BULLET_SIZE 1 @@ -1579,14 +1581,26 @@ nsBulletFrame::GetDesiredSize(nsPresContext* aCX, case NS_STYLE_LIST_STYLE_DISC: case NS_STYLE_LIST_STYLE_CIRCLE: - case NS_STYLE_LIST_STYLE_SQUARE: + case NS_STYLE_LIST_STYLE_SQUARE: { ascent = fm->MaxAscent(); bulletSize = std::max(nsPresContext::CSSPixelsToAppUnits(MIN_BULLET_SIZE), NSToCoordRound(0.8f * (float(ascent) / 2.0f))); mPadding.bottom = NSToCoordRound(float(ascent) / 8.0f); aMetrics.Width() = aMetrics.Height() = bulletSize; aMetrics.SetTopAscent(bulletSize + mPadding.bottom); + + // Add spacing to the padding + nscoord halfEm = fm->EmHeight() / 2; + WritingMode wm = GetWritingMode(); + if (wm.IsVertical()) { + mPadding.bottom += halfEm; + } else if (wm.IsBidiLTR()) { + mPadding.right += halfEm; + } else { + mPadding.left += halfEm; + } break; + } default: case NS_STYLE_LIST_STYLE_DECIMAL_LEADING_ZERO: @@ -1669,22 +1683,15 @@ nsBulletFrame::Reflow(nsPresContext* aPresContext, SetFontSizeInflation(inflation); // Get the base size - // This will also set mSuppressPadding appropriately (via GetListItemText()) - // for the builtin counter styles with ideographic comma as suffix where the - // default padding from ua.css is not desired. GetDesiredSize(aPresContext, aReflowState.rendContext, aMetrics, inflation); // Add in the border and padding; split the top/bottom between the // ascent and descent to make things look nice const nsMargin& borderPadding = aReflowState.ComputedPhysicalBorderPadding(); - if (!mSuppressPadding || - aPresContext->HasAuthorSpecifiedRules(this, - NS_AUTHOR_SPECIFIED_PADDING)) { - mPadding.top += NSToCoordRound(borderPadding.top * inflation); - mPadding.right += NSToCoordRound(borderPadding.right * inflation); - mPadding.bottom += NSToCoordRound(borderPadding.bottom * inflation); - mPadding.left += NSToCoordRound(borderPadding.left * inflation); - } + mPadding.top += NSToCoordRound(borderPadding.top * inflation); + mPadding.right += NSToCoordRound(borderPadding.right * inflation); + mPadding.bottom += NSToCoordRound(borderPadding.bottom * inflation); + mPadding.left += NSToCoordRound(borderPadding.left * inflation); aMetrics.Width() += mPadding.left + mPadding.right; aMetrics.Height() += mPadding.top + mPadding.bottom; aMetrics.SetTopAscent(aMetrics.TopAscent() + mPadding.top); diff --git a/layout/generic/nsBulletFrame.h b/layout/generic/nsBulletFrame.h index 24bb82e8d428..354e9be81fb1 100644 --- a/layout/generic/nsBulletFrame.h +++ b/layout/generic/nsBulletFrame.h @@ -85,8 +85,7 @@ public: /* get suffix of list item */ static void GetListItemSuffix(int32_t aListStyleType, - nsString& aResult, - bool& aSuppressPadding); + nsString& aResult); /* get list item text, with '.' */ void GetListItemText(const nsStyleList& aStyleList, nsString& aResult); @@ -124,13 +123,6 @@ protected: nsSize mIntrinsicSize; int32_t mOrdinal; - bool mTextIsRTL; - - // If set to true, any padding of bullet defined in the UA style sheet will - // be suppressed. This is used for some CJK numbering styles where extra - // space after the suffix is not desired. Note that, any author-specified - // padding overriding the default style will NOT be suppressed. 
- bool mSuppressPadding; private: diff --git a/layout/generic/test/test_bug633762.html b/layout/generic/test/test_bug633762.html index c85b8117759e..6ceca6338fc9 100644 --- a/layout/generic/test/test_bug633762.html +++ b/layout/generic/test/test_bug633762.html @@ -35,8 +35,8 @@ function runTests() { scrollTopBefore = doc.body.scrollTop; // send up arrow key event sendKey("UP"); - - setTimeout("finish();", 20); + + window.requestAnimationFrame(finish); } function finish() { diff --git a/layout/ipc/RenderFrameChild.cpp b/layout/ipc/RenderFrameChild.cpp index 008fe5abccf7..347f213a69a1 100644 --- a/layout/ipc/RenderFrameChild.cpp +++ b/layout/ipc/RenderFrameChild.cpp @@ -37,6 +37,11 @@ RenderFrameChild::AllocPLayerTransactionChild() { LayerTransactionChild* c = new LayerTransactionChild(); c->AddIPDLReference(); + // We only create PLayerTransaction objects through PRenderFrame when we don't + // have a PCompositor. This means that the child process content will never + // get drawn to the screen, but some tests rely on it pretending to function + // for now. + c->SetHasNoCompositor(); return c; } diff --git a/layout/ipc/RenderFrameParent.cpp b/layout/ipc/RenderFrameParent.cpp index febb91e75efb..bdfc212b275a 100644 --- a/layout/ipc/RenderFrameParent.cpp +++ b/layout/ipc/RenderFrameParent.cpp @@ -809,6 +809,7 @@ RenderFrameParent::ContentViewScaleChanged(nsContentView* aView) void RenderFrameParent::ShadowLayersUpdated(LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, diff --git a/layout/ipc/RenderFrameParent.h b/layout/ipc/RenderFrameParent.h index 2d98dcc2c5b3..38a49de70bee 100644 --- a/layout/ipc/RenderFrameParent.h +++ b/layout/ipc/RenderFrameParent.h @@ -79,6 +79,7 @@ public: void ContentViewScaleChanged(nsContentView* aView); virtual void ShadowLayersUpdated(LayerTransactionParent* aLayerTree, + const uint64_t& aTransactionId, const TargetConfig& aTargetConfig, bool aIsFirstPaint, bool aScheduleComposite, diff --git a/layout/reftests/counters/counter-suffix-ref.html b/layout/reftests/counters/counter-suffix-ref.html index 71eec37d9c49..228db22a60d3 100644 --- a/layout/reftests/counters/counter-suffix-ref.html +++ b/layout/reftests/counters/counter-suffix-ref.html @@ -8,56 +8,30 @@ text-align: end; box-sizing: border-box; } - .def span { -moz-padding-end: 0.5em; } - .fix span { -moz-padding-end: 1em; }
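The nsBulletFrame and nsBlockFrame hunks above move the separator into the marker text itself: GetListItemSuffix() now returns ". " (or ", " and the spacing-free ideographic comma for the Korean and CJK styles), plain disc/circle/square bullets gain half an em of inline-end padding in GetDesiredSize(), and the blanket -moz-padding-end rule is dropped from ua.css, which is why the reftest expectations here change from "1.foo" to "1. foo". A minimal sketch of the new suffix mapping; the enum is illustrative, while the real code switches on the NS_STYLE_LIST_STYLE_* integer constants:

#include <string>

// Illustrative stand-ins for the NS_STYLE_LIST_STYLE_* constants.
enum class ListStyle { Decimal, Hebrew, Disc, CjkDecimal, KoreanHangulFormal };

// Mirrors the new GetListItemSuffix(): the separating space (or the
// spacing-free ideographic comma) is part of the suffix string, so the
// painted marker and the accessible text agree.
std::u16string ListItemSuffix(ListStyle aStyle) {
  switch (aStyle) {
    case ListStyle::Disc:                // disc/circle/square bullets
      return u" ";
    case ListStyle::CjkDecimal:          // ideographic comma, no space
      return u"\u3001";
    case ListStyle::KoreanHangulFormal:
      return u", ";
    default:                             // decimal, hebrew, ...
      return u". ";
  }
}

int main() {
  return ListItemSuffix(ListStyle::Decimal) == u". " ? 0 : 1;
}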
-

- 1.foo
- 2.bar +

+ 1. foo
+ 2. bar

-

- א.foo
- ב.bar +

+ א. foo
+ ב. bar

一、foo
二、bar

-

- 일,foo
- 이,bar +

+ 일, foo
+ 이, bar

-

- 1.foo
- 2.bar +

+ 1. foo
+ 2. bar

-

- א.foo
- ב.bar -

-

- 一、foo
- 二、bar -

-

- 일,foo
- 이,bar -

-

- 1.foo
- 2.bar -

-

- א.foo
- ב.bar -

-

- 1.foo
- 2.bar -

-

- א.foo
- ב.bar +

+ א. foo
+ ב. bar

diff --git a/layout/reftests/counters/counter-suffix.html b/layout/reftests/counters/counter-suffix.html index d1325f4b976c..6378d7500bdd 100644 --- a/layout/reftests/counters/counter-suffix.html +++ b/layout/reftests/counters/counter-suffix.html @@ -7,21 +7,12 @@ .heb { list-style-type: hebrew; } .cjk { list-style-type: cjk-decimal; } .kor { list-style-type: korean-hangul-formal; } - .spec li::-moz-list-number { - -moz-padding-end: 1em; - }
  1. foo
  2. bar
  1. foo
  2. bar
  1. foo
  2. bar
  1. foo
  2. bar
-
  1. foo
  2. bar
-
  1. foo
  2. bar
-
  1. foo
  2. bar
-
  1. foo
  2. bar
  1. foo
  2. bar
  1. foo
  2. bar
-
  1. foo
  2. bar
-
  1. foo
  2. bar
diff --git a/layout/reftests/counters/reftest.list b/layout/reftests/counters/reftest.list index 1172519fbc4a..39426b9ff18a 100644 --- a/layout/reftests/counters/reftest.list +++ b/layout/reftests/counters/reftest.list @@ -68,7 +68,7 @@ fails-if(B2G&&browserIsRemote) == t1204-reset-02-c-o-test.html t1204-reset-02-c- == counter-ua-limits-list-00.html counter-ua-limits-list-00-ref.html == counter-ua-limits-list-01.html counter-ua-limits-list-01-ref.html == multiple-thai-counters.html multiple-thai-counters-ref.html -fails-if(B2G) == counter-suffix.html counter-suffix-ref.html # B2G kerning +== counter-suffix.html counter-suffix-ref.html == counter-cjk-decimal.html counter-cjk-decimal-ref.html == counter-japanese-informal.html counter-japanese-informal-ref.html == counter-japanese-formal.html counter-japanese-formal-ref.html diff --git a/layout/reftests/list-item/bullet-space-1-ref.html b/layout/reftests/list-item/bullet-space-1-ref.html new file mode 100644 index 000000000000..f62ad5e1fed2 --- /dev/null +++ b/layout/reftests/list-item/bullet-space-1-ref.html @@ -0,0 +1,75 @@ + + + + + + Bug 1017335 + + + + + +
+
+
    +
  • foo
  • +
+
+ +
+
    +
  • bar
  • +
+
+
+ + +
+
+ foo +
+ +
+ bar +
+
+ + + + diff --git a/layout/reftests/list-item/bullet-space-1.html b/layout/reftests/list-item/bullet-space-1.html new file mode 100644 index 000000000000..02cdfd218094 --- /dev/null +++ b/layout/reftests/list-item/bullet-space-1.html @@ -0,0 +1,75 @@ + + + + + + Bug 1017335 + + + + + +
+
+
    +
  • foo
  • +
+
+ +
+
    +
  • bar
  • +
+
+
+ + +
+
+ foo +
+ +
+ bar +
+
+ + + + diff --git a/layout/reftests/list-item/numbering-2-ref.html b/layout/reftests/list-item/numbering-2-ref.html index 85888f2335b5..b69644037177 100644 --- a/layout/reftests/list-item/numbering-2-ref.html +++ b/layout/reftests/list-item/numbering-2-ref.html @@ -6,7 +6,7 @@ html,body { color:black; background-color:white; font-size:12px; padding:0; margin:0; } -li {margin-left:0; padding-left:0px; } +li {margin-left:0; padding-left:0px; width:200px; } ol {margin-left:0; padding-left:40px; } diff --git a/layout/reftests/list-item/numbering-2.html b/layout/reftests/list-item/numbering-2.html index aee2a6f08db2..02865411bfbe 100644 --- a/layout/reftests/list-item/numbering-2.html +++ b/layout/reftests/list-item/numbering-2.html @@ -6,7 +6,7 @@ html,body { color:black; background-color:white; font-size:12px; padding:0; margin:0; } -li {margin-left:0; padding-left:0px; } +li {margin-left:0; padding-left:0px; width:200px; } ol {margin-left:0; padding-left:40px; } diff --git a/layout/reftests/list-item/reftest.list b/layout/reftests/list-item/reftest.list index 8a6866671a59..c7a2b6b807e5 100644 --- a/layout/reftests/list-item/reftest.list +++ b/layout/reftests/list-item/reftest.list @@ -5,3 +5,4 @@ asserts(1) == ol-reversed-1b.html ol-reversed-1-ref.html # bug 478135 == ol-reversed-1c.html ol-reversed-1-ref.html == ol-reversed-2.html ol-reversed-2-ref.html == ol-reversed-3.html ol-reversed-3-ref.html +== bullet-space-1.html bullet-space-1-ref.html diff --git a/layout/reftests/pixel-rounding/reftest.list b/layout/reftests/pixel-rounding/reftest.list index 1c1cb20cb5ad..6e3055045573 100644 --- a/layout/reftests/pixel-rounding/reftest.list +++ b/layout/reftests/pixel-rounding/reftest.list @@ -174,3 +174,6 @@ skip-if(B2G) random-if(Android) == border-image-width-4.html border-image-width- skip-if(B2G) random-if(Android) == border-image-width-9.html border-image-width-0.html # bug 661996 # bug 773482 == iframe-1.html iframe-1-ref.html + +== viewport-units-rounding-1.html viewport-units-rounding-1-ref.html +== viewport-units-rounding-2.html about:blank diff --git a/layout/reftests/pixel-rounding/viewport-units-rounding-1-frame.html b/layout/reftests/pixel-rounding/viewport-units-rounding-1-frame.html new file mode 100644 index 000000000000..955b873dd6df --- /dev/null +++ b/layout/reftests/pixel-rounding/viewport-units-rounding-1-frame.html @@ -0,0 +1,51 @@ + +subframe for test of viewport units rounding + + + +
+
+
+
+
+
+ + + +
+
+
+
+
+
+
+ +
+
+
+
+
+
+
diff --git a/layout/reftests/pixel-rounding/viewport-units-rounding-1-ref.html b/layout/reftests/pixel-rounding/viewport-units-rounding-1-ref.html new file mode 100644 index 000000000000..f762f004ffca --- /dev/null +++ b/layout/reftests/pixel-rounding/viewport-units-rounding-1-ref.html @@ -0,0 +1,123 @@ + +viewport units rounding reference + + + +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/layout/reftests/pixel-rounding/viewport-units-rounding-1.html b/layout/reftests/pixel-rounding/viewport-units-rounding-1.html new file mode 100644 index 000000000000..a31154546775 --- /dev/null +++ b/layout/reftests/pixel-rounding/viewport-units-rounding-1.html @@ -0,0 +1,12 @@ + +viewport units rounding + + + + + + + + diff --git a/layout/reftests/pixel-rounding/viewport-units-rounding-2.html b/layout/reftests/pixel-rounding/viewport-units-rounding-2.html new file mode 100644 index 000000000000..f92009fd6e06 --- /dev/null +++ b/layout/reftests/pixel-rounding/viewport-units-rounding-2.html @@ -0,0 +1,43 @@ + +viewport units rounding test + + + + diff --git a/layout/style/AnimationCommon.h b/layout/style/AnimationCommon.h index 92aeb76cc93d..bb9c72bf5103 100644 --- a/layout/style/AnimationCommon.h +++ b/layout/style/AnimationCommon.h @@ -310,6 +310,19 @@ struct ElementAnimation return (IsPaused() ? mPauseStart : aTime) - mStartTime - mDelay; } + // Return the duration of the active interval. + mozilla::TimeDuration ActiveDuration() const { + return mTiming.mIterationDuration.MultDouble(mTiming.mIterationCount); + } + + // Return the duration from the start the active interval to the point where + // the animation begins playback. This is zero unless the animation has + // a negative delay in which case it is the absolute value of the delay. + // This is used for setting the elapsedTime member of AnimationEvents. + mozilla::TimeDuration InitialAdvance() const { + return std::max(TimeDuration(), mDelay * -1); + } + // This function takes as input the timing parameters of an animation and // returns the computed timing at the specified moment. // diff --git a/layout/style/nsAnimationManager.cpp b/layout/style/nsAnimationManager.cpp index 0afa78885d97..db2b123ddb5c 100644 --- a/layout/style/nsAnimationManager.cpp +++ b/layout/style/nsAnimationManager.cpp @@ -252,13 +252,19 @@ ElementAnimations::GetEventsAt(TimeStamp aRefreshTime, ? NS_ANIMATION_START : NS_ANIMATION_ITERATION; anim->mLastNotification = computedTiming.mCurrentIteration; + TimeDuration iterationStart = + anim->mTiming.mIterationDuration * + computedTiming.mCurrentIteration; + TimeDuration elapsedTime = + std::max(iterationStart, anim->InitialAdvance()); AnimationEventInfo ei(mElement, anim->mName, message, - elapsedDuration, PseudoElement()); + elapsedTime, PseudoElement()); aEventsToDispatch.AppendElement(ei); } break; case ComputedTiming::AnimationPhase_After: + TimeDuration activeDuration = anim->ActiveDuration(); // If we skipped the animation interval entirely, dispatch // 'animationstart' first if (anim->mLastNotification == @@ -267,8 +273,10 @@ ElementAnimations::GetEventsAt(TimeStamp aRefreshTime, // (This is overwritten below but we set it here to maintain // internal consistency.) anim->mLastNotification = 0; + TimeDuration elapsedTime = + std::min(anim->InitialAdvance(), activeDuration); AnimationEventInfo ei(mElement, anim->mName, NS_ANIMATION_START, - elapsedDuration, PseudoElement()); + elapsedTime, PseudoElement()); aEventsToDispatch.AppendElement(ei); } // Dispatch 'animationend' when needed. 
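With ActiveDuration() and InitialAdvance() in place, the event code above derives elapsedTime from the animation's own timeline instead of the refresh-driver clock: iteration events report the iteration boundary clamped up to any negative delay, a start that was skipped over entirely reports min(InitialAdvance, ActiveDuration), and end events report the active duration. A self-contained model of that clamping; the Timing struct and its field names are illustrative, not the Gecko types:

#include <algorithm>
#include <cstdio>

// Illustrative stand-in for ElementAnimation's timing state.
struct Timing {
  double iterationDuration; // seconds
  double iterationCount;
  double delay;             // may be negative

  // Mirrors ActiveDuration()/InitialAdvance() from AnimationCommon.h above.
  double ActiveDuration() const { return iterationDuration * iterationCount; }
  double InitialAdvance() const { return std::max(0.0, -delay); }

  // animationstart/animationiteration at an iteration boundary can never
  // report a time earlier than the point where playback actually began.
  double ElapsedAtIteration(double aIteration) const {
    return std::max(iterationDuration * aIteration, InitialAdvance());
  }
  // animationstart when the whole active interval was skipped over.
  double ElapsedAtSkippedStart() const {
    return std::min(InitialAdvance(), ActiveDuration());
  }
  // animationend.
  double ElapsedAtEnd() const { return ActiveDuration(); }
};

int main() {
  Timing t{1.0, 1.0, -2.0}; // like "animation: anim2 1s -2s" in the new test
  std::printf("start=%g end=%g\n",
              t.ElapsedAtSkippedStart(), t.ElapsedAtEnd());
  // Prints start=1 end=1, matching the expected animationstart/animationend
  // events for the negative-delay case.
  return 0;
}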
@@ -276,7 +284,7 @@ ElementAnimations::GetEventsAt(TimeStamp aRefreshTime, ElementAnimation::LAST_NOTIFICATION_END) { anim->mLastNotification = ElementAnimation::LAST_NOTIFICATION_END; AnimationEventInfo ei(mElement, anim->mName, NS_ANIMATION_END, - elapsedDuration, PseudoElement()); + activeDuration, PseudoElement()); aEventsToDispatch.AppendElement(ei); } break; diff --git a/layout/style/nsRuleNode.cpp b/layout/style/nsRuleNode.cpp index 12aec2a2eebd..5c551e06b526 100644 --- a/layout/style/nsRuleNode.cpp +++ b/layout/style/nsRuleNode.cpp @@ -223,9 +223,21 @@ struct CalcLengthCalcOps : public css::BasicCoordCalcOps, } }; -static inline nscoord ScaleCoord(const nsCSSValue &aValue, float factor) +static inline nscoord ScaleCoordRound(const nsCSSValue& aValue, float aFactor) { - return NSToCoordRoundWithClamp(aValue.GetFloatValue() * factor); + return NSToCoordRoundWithClamp(aValue.GetFloatValue() * aFactor); +} + +static inline nscoord ScaleViewportCoordTrunc(const nsCSSValue& aValue, + nscoord aViewportSize) +{ + // For units (like percentages and viewport units) where authors might + // repeatedly use a value and expect some multiple of the value to be + // smaller than a container, we need to use floor rather than round. + // We need to use division by 100.0 rather than multiplication by 0.1f + // to avoid introducing error. + return NSToCoordTruncClamped(aValue.GetFloatValue() * + aViewportSize / 100.0f); } already_AddRefed @@ -351,18 +363,22 @@ static nscoord CalcLengthWith(const nsCSSValue& aValue, // for an increased cost to dynamic changes to the viewport size // when viewport units are in use. case eCSSUnit_ViewportWidth: { - return ScaleCoord(aValue, 0.01f * CalcViewportUnitsScale(aPresContext).width); + nscoord viewportWidth = CalcViewportUnitsScale(aPresContext).width; + return ScaleViewportCoordTrunc(aValue, viewportWidth); } case eCSSUnit_ViewportHeight: { - return ScaleCoord(aValue, 0.01f * CalcViewportUnitsScale(aPresContext).height); + nscoord viewportHeight = CalcViewportUnitsScale(aPresContext).height; + return ScaleViewportCoordTrunc(aValue, viewportHeight); } case eCSSUnit_ViewportMin: { nsSize vuScale(CalcViewportUnitsScale(aPresContext)); - return ScaleCoord(aValue, 0.01f * min(vuScale.width, vuScale.height)); + nscoord viewportMin = min(vuScale.width, vuScale.height); + return ScaleViewportCoordTrunc(aValue, viewportMin); } case eCSSUnit_ViewportMax: { nsSize vuScale(CalcViewportUnitsScale(aPresContext)); - return ScaleCoord(aValue, 0.01f * max(vuScale.width, vuScale.height)); + nscoord viewportMax = max(vuScale.width, vuScale.height); + return ScaleViewportCoordTrunc(aValue, viewportMax); } // While we could deal with 'rem' units correctly by simply not // caching any data that uses them in the rule tree, it's valuable @@ -415,7 +431,7 @@ static nscoord CalcLengthWith(const nsCSSValue& aValue, rootFontSize = rootStyleFont->mFont.size; } - return ScaleCoord(aValue, float(rootFontSize)); + return ScaleCoordRound(aValue, float(rootFontSize)); } default: // Fall through to the code for units that can't be stored in the @@ -436,13 +452,13 @@ static nscoord CalcLengthWith(const nsCSSValue& aValue, case eCSSUnit_EM: { // CSS2.1 specifies that this unit scales to the computed font // size, not the em-width in the font metrics, despite the name. 
- return ScaleCoord(aValue, float(aFontSize)); + return ScaleCoordRound(aValue, float(aFontSize)); } case eCSSUnit_XHeight: { nsRefPtr fm = GetMetricsFor(aPresContext, aStyleContext, styleFont, aFontSize, aUseUserFontSet); - return ScaleCoord(aValue, float(fm->XHeight())); + return ScaleCoordRound(aValue, float(fm->XHeight())); } case eCSSUnit_Char: { nsRefPtr fm = @@ -451,8 +467,8 @@ static nscoord CalcLengthWith(const nsCSSValue& aValue, gfxFloat zeroWidth = (fm->GetThebesFontGroup()->GetFontAt(0) ->GetMetrics().zeroOrAveCharWidth); - return ScaleCoord(aValue, ceil(aPresContext->AppUnitsPerDevPixel() * - zeroWidth)); + return ScaleCoordRound(aValue, ceil(aPresContext->AppUnitsPerDevPixel() * + zeroWidth)); } default: NS_NOTREACHED("unexpected unit"); diff --git a/layout/style/test/test_animations.html b/layout/style/test/test_animations.html index 9c44d2338210..f6b9b0ee81da 100644 --- a/layout/style/test/test_animations.html +++ b/layout/style/test/test_animations.html @@ -1270,7 +1270,7 @@ advance_clock(300); is(cs_before.marginRight, "30px", ":before test at 2300ms"); advance_clock(700); check_events([ { type: "animationstart", animationName: "anim2", elapsedTime: 0, pseudoElement: "::before" }, - { type: "animationiteration", animationName: "anim2", elapsedTime: 1.2, pseudoElement: "::before" }, + { type: "animationiteration", animationName: "anim2", elapsedTime: 1, pseudoElement: "::before" }, { type: "animationiteration", animationName: "anim2", elapsedTime: 2, pseudoElement: "::before" }, { type: "animationend", animationName: "anim2", elapsedTime: 3, pseudoElement: "::before" }]); done_div(); @@ -1291,7 +1291,7 @@ advance_clock(300); is(cs_after.marginRight, "30px", ":after test at 2300ms"); advance_clock(700); check_events([ { type: "animationstart", animationName: "anim2", elapsedTime: 0, pseudoElement: "::after" }, - { type: "animationiteration", animationName: "anim2", elapsedTime: 1.2, pseudoElement: "::after" }, + { type: "animationiteration", animationName: "anim2", elapsedTime: 1, pseudoElement: "::after" }, { type: "animationiteration", animationName: "anim2", elapsedTime: 2, pseudoElement: "::after" }, { type: "animationend", animationName: "anim2", elapsedTime: 3, pseudoElement: "::after" }]); done_div(); @@ -1548,16 +1548,47 @@ listen(); advance_clock(0); // Trigger animation advance_clock(1200); // Skip past end of animation's entire active duration check_events([{ type: 'animationstart', target: div, - // Bug 1007513 - elapsedTime should be 0 below - animationName: 'anim2', elapsedTime: 1.1, + animationName: 'anim2', elapsedTime: 0, pseudoElement: "" }, { type: 'animationend', target: div, - // Bug 1007513 - elapsedTime should be 1 below - animationName: 'anim2', elapsedTime: 1.1, + animationName: 'anim2', elapsedTime: 1, pseudoElement: "" }], "events after skipping over animation interval"); done_div(); +/* + * Bug 1007513 - AnimationEvent.elapsedTime should be animation time + */ +new_div("animation: anim2 1s 2"); +listen(); +advance_clock(0); // Trigger animation +advance_clock(500); // Jump to middle of first interval +advance_clock(1000); // Jump to middle of second interval +advance_clock(1000); // Jump past end of last interval +check_events([{ type: 'animationstart', target: div, + animationName: 'anim2', elapsedTime: 0, + pseudoElement: "" }, + { type: 'animationiteration', target: div, + animationName: 'anim2', elapsedTime: 1, + pseudoElement: "" }, + { type: 'animationend', target: div, + animationName: 'anim2', elapsedTime: 2, + pseudoElement: "" }], + 
"events after skipping past event moments"); +done_div(); + +new_div("animation: anim2 1s -2s"); +listen(); +advance_clock(0); +check_events([{ type: 'animationstart', target: div, + animationName: 'anim2', elapsedTime: 1, + pseudoElement: "" }, + { type: 'animationend', target: div, + animationName: 'anim2', elapsedTime: 1, + pseudoElement: "" }], + "events after skipping over animation with negative delay"); +done_div(); + SpecialPowers.DOMWindowUtils.restoreNormalRefresh(); diff --git a/layout/style/test/test_animations_omta.html b/layout/style/test/test_animations_omta.html index e4facc193c09..23085e9f604c 100644 --- a/layout/style/test/test_animations_omta.html +++ b/layout/style/test/test_animations_omta.html @@ -1856,17 +1856,26 @@ addAsyncTest(function *() { advance_clock(0); // Trigger animation advance_clock(1200); // Skip past end of animation's entire active duration check_events([{ type: 'animationstart', target: gDiv, - // Bug 1007513 - elapsedTime should be 0 below - animationName: 'anim2', elapsedTime: 1.1, + animationName: 'anim2', elapsedTime: 0, pseudoElement: "" }, { type: 'animationend', target: gDiv, - // Bug 1007513 - elapsedTime should be 1 below - animationName: 'anim2', elapsedTime: 1.1, + animationName: 'anim2', elapsedTime: 1, pseudoElement: "" }], "events after skipping over animation interval"); done_div(); }); +/* + * Bug 1007513 - AnimationEvent.elapsedTime should be animation time + * + * There is no OMTA-version of this test since it is specific to the + * contents of animation events which are dispatched on the main thread. + * + * We *do* provide an OMTA-version of some tests regarding the *dispatch* of + * events to catch possible regressions if in future event dispatch is tied + * to animation throttling. + */ + //---------------------------------------------------------------------- // // Helper functions from test_animations.html diff --git a/layout/style/ua.css b/layout/style/ua.css index 4ba0fb0d6a91..9696b6404ed3 100644 --- a/layout/style/ua.css +++ b/layout/style/ua.css @@ -77,9 +77,6 @@ *|*::-moz-list-bullet, *|*::-moz-list-number { display: inline; vertical-align: baseline; - /* Note that this padding is suppressed for some CJK numbering styles; - * see bug 934072 */ - -moz-padding-end: 0.5em; } /* Links */ diff --git a/media/mtransport/third_party/nICEr/src/stun/addrs.c b/media/mtransport/third_party/nICEr/src/stun/addrs.c index 738470fe300b..2ebf5483cc55 100644 --- a/media/mtransport/third_party/nICEr/src/stun/addrs.c +++ b/media/mtransport/third_party/nICEr/src/stun/addrs.c @@ -53,8 +53,8 @@ static char *RCSSTRING __UNUSED__="$Id: addrs.c,v 1.2 2008/04/28 18:21:30 ekr Ex #undef __unused #include #endif -#include #ifndef LINUX +#include #if !defined(__OpenBSD__) && !defined(__NetBSD__) #include #endif diff --git a/media/mtransport/third_party/nICEr/src/stun/stun.h b/media/mtransport/third_party/nICEr/src/stun/stun.h index a5338308c1ed..8e5a60750532 100644 --- a/media/mtransport/third_party/nICEr/src/stun/stun.h +++ b/media/mtransport/third_party/nICEr/src/stun/stun.h @@ -39,13 +39,15 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#else #include #include -#include #ifndef LINUX +#include #if !defined(__OpenBSD__) && !defined(__NetBSD__) #include #endif #include #include +#else +#include #endif #ifndef BSD #include diff --git a/media/webrtc/signaling/signaling.gyp b/media/webrtc/signaling/signaling.gyp index 17a9ab492bb4..3217c61bb25e 100644 --- a/media/webrtc/signaling/signaling.gyp +++ b/media/webrtc/signaling/signaling.gyp @@ -50,7 +50,6 @@ './include', './src/sipcc/include', './src/sipcc/cpr/include', - '../../../ipc/chromium/src', '../../../ipc/chromium/src/base/third_party/nspr', '../../../xpcom/base', '../../../dom/base', @@ -63,9 +62,6 @@ '../trunk/webrtc/modules/interface', '../trunk/webrtc/peerconnection', '../../libyuv/include', - '../../../netwerk/srtp/src/include', - '../../../netwerk/srtp/src/crypto/include', - '../../../ipc/chromium/src', '../../mtransport/third_party/nrappkit/src/util/libekr', ], @@ -160,8 +156,6 @@ './src/mediapipeline/MediaPipeline.cpp', './src/mediapipeline/MediaPipelineFilter.h', './src/mediapipeline/MediaPipelineFilter.cpp', - './src/mediapipeline/SrtpFlow.h', - './src/mediapipeline/SrtpFlow.cpp', ], # @@ -194,12 +188,28 @@ # Conditionals # 'conditions': [ + # hack so I can change the include flow for SrtpFlow + ['build_with_mozilla==1', { + 'sources': [ + './src/mediapipeline/SrtpFlow.h', + './src/mediapipeline/SrtpFlow.cpp', + ], + 'include_dirs!': [ + '../trunk/webrtc', + ], + 'include_dirs': [ + '../../../netwerk/srtp/src/include', + '../../../netwerk/srtp/src/crypto/include', + ], + }], ['moz_webrtc_omx==1', { 'sources': [ './src/media-conduit/WebrtcOMXH264VideoCodec.cpp', './src/media-conduit/OMXVideoCodec.cpp', ], 'include_dirs': [ + # hack on hack to re-add it after SrtpFlow removes it + '../../webrtc/trunk/webrtc', '../../../content/media/omx', '../../../gfx/layers/client', ], @@ -238,6 +248,7 @@ ], 'defines': [ + 'OS_LINUX', 'SIP_OS_LINUX', '_GNU_SOURCE', 'LINUX', @@ -252,6 +263,7 @@ 'include_dirs': [ ], 'defines': [ + 'OS_WIN', 'SIP_OS_WINDOWS', 'WIN32', 'GIPS_VER=3480', @@ -279,6 +291,7 @@ 'include_dirs': [ ], 'defines': [ + 'OS_MACOSX', 'SIP_OS_OSX', 'OSX', '_FORTIFY_SOURCE=2', diff --git a/media/webrtc/signaling/src/callcontrol/CallControlManagerImpl.cpp b/media/webrtc/signaling/src/callcontrol/CallControlManagerImpl.cpp index 6615a18447fa..bb6af5c6d678 100755 --- a/media/webrtc/signaling/src/callcontrol/CallControlManagerImpl.cpp +++ b/media/webrtc/signaling/src/callcontrol/CallControlManagerImpl.cpp @@ -18,6 +18,8 @@ #include "CallControlManagerImpl.h" #include "csf_common.h" +#include "base/platform_thread.h" + extern "C" { #include "config_api.h" diff --git a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp index f2de89fed70b..b4dbb02634fc 100644 --- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp +++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp @@ -751,7 +751,8 @@ WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len) } #endif - if(mPtrVoENetwork->ReceivedRTPPacket(mChannel,data,len) == -1) + // XXX we need to get passed the time the packet was received + if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1) { int error = mPtrVoEBase->LastError(); CSFLogError(logTag, "%s RTP Processing Error %d", __FUNCTION__, error); diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp index 551908f01fde..fc6aab3fe316 100644 --- 
a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp @@ -18,6 +18,7 @@ #include "nsIPrefService.h" #include "nsIPrefBranch.h" +#include "webrtc/common_types.h" #include "webrtc/common_video/interface/native_handle.h" #include "webrtc/video_engine/include/vie_errors.h" @@ -244,12 +245,10 @@ MediaConduitErrorCode WebrtcVideoConduit::Init(WebrtcVideoConduit *other) } else { #ifdef MOZ_WIDGET_ANDROID - jobject context = jsjni_GetGlobalContextRef(); - // get the JVM JavaVM *jvm = jsjni_GetVM(); - if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) { + if (webrtc::VideoEngine::SetAndroidObjects(jvm) != 0) { CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__); return kMediaConduitSessionNotInited; } @@ -1025,7 +1024,8 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len) if(mEngineReceiving) { // let the engine know of a RTP packet to decode - if(mPtrViENetwork->ReceivedRTPPacket(mChannel,data,len) == -1) + // XXX we need to get passed the time the packet was received + if(mPtrViENetwork->ReceivedRTPPacket(mChannel, data, len, webrtc::PacketTime()) == -1) { int error = mPtrViEBase->LastError(); CSFLogError(logTag, "%s RTP Processing Failed %d ", __FUNCTION__, error); diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.h b/media/webrtc/signaling/src/media-conduit/VideoConduit.h index 0d0191a6e9e4..e82e21d4e19b 100755 --- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h @@ -11,6 +11,8 @@ #include "MediaConduitInterface.h" #include "MediaEngineWrapper.h" +// conflicts with #include of scoped_ptr.h +#undef FF // Video Engine Includes #include "webrtc/common_types.h" #ifdef FF diff --git a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp index 62faec87aa0b..b3d2f94addf5 100644 --- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp +++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp @@ -372,6 +372,7 @@ public: break; case ADDICECANDIDATEERROR: + mPC->OnAddIceCandidateError(); mObserver->OnAddIceCandidateError(mCode, ObString(mReason.c_str()), rv); break; @@ -493,6 +494,7 @@ PeerConnectionImpl::PeerConnectionImpl(const GlobalObject* aGlobal) , mNumAudioStreams(0) , mNumVideoStreams(0) , mHaveDataStream(false) + , mAddCandidateErrorCount(0) , mTrickle(true) // TODO(ekr@rtfm.com): Use pref { #ifdef MOZILLA_INTERNAL_API @@ -1961,6 +1963,16 @@ PeerConnectionImpl::IceConnectionStateChange_m(PCImplIceConnectionState aState) timeDelta.ToMilliseconds()); } } + + if (isSucceeded(aState)) { + Telemetry::Accumulate( + Telemetry::WEBRTC_ICE_ADD_CANDIDATE_ERRORS_GIVEN_SUCCESS, + mAddCandidateErrorCount); + } else if (isFailed(aState)) { + Telemetry::Accumulate( + Telemetry::WEBRTC_ICE_ADD_CANDIDATE_ERRORS_GIVEN_FAILURE, + mAddCandidateErrorCount); + } } #endif diff --git a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h index be801c75cd85..afdaab9ed4dc 100644 --- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h +++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h @@ -528,6 +528,10 @@ public: // is called to start the list over. void ClearSdpParseErrorMessages(); + void OnAddIceCandidateError() { + ++mAddCandidateErrorCount; + } + // Called to retreive the list of parsing errors. 
const std::vector &GetSdpParseErrors(); @@ -707,6 +711,7 @@ private: // Holder for error messages from parsing SDP std::vector mSDPParseErrorMessages; + unsigned int mAddCandidateErrorCount; bool mTrickle; diff --git a/media/webrtc/trunk/DEPS b/media/webrtc/trunk/DEPS index 1a44a3afbfb2..03bfb71da9e0 100644 --- a/media/webrtc/trunk/DEPS +++ b/media/webrtc/trunk/DEPS @@ -1,126 +1,315 @@ +use_relative_paths = True + vars = { + # Override root_dir in your .gclient's custom_vars to specify a custom root + # folder name. + "root_dir": "trunk", + "extra_gyp_flag": "-Dextra_gyp_flag=0", + # Use this googlecode_url variable only if there is an internal mirror for it. # If you do not know, use the full path while defining your new deps entry. "googlecode_url": "http://%s.googlecode.com/svn", + "sourceforge_url": "http://svn.code.sf.net/p/%(repo)s/code", "chromium_trunk" : "http://src.chromium.org/svn/trunk", - "chromium_revision": "162524", - # Still needs the libjingle_revision here because some of - # the deps have to be pulled from libjingle repository. - "libjingle_revision": "204", + "chromium_revision": "249215", + + # A small subset of WebKit is needed for the Android Python test framework. + "webkit_trunk": "http://src.chromium.org/blink/trunk", } # NOTE: Prefer revision numbers to tags for svn deps. Use http rather than # https; the latter can cause problems for users behind proxies. deps = { - "trunk/chromium_deps": + "../chromium_deps": File(Var("chromium_trunk") + "/src/DEPS@" + Var("chromium_revision")), - "trunk/third_party/webrtc": - From("trunk/chromium_deps", "src/third_party/webrtc"), + "../chromium_gn": + File(Var("chromium_trunk") + "/src/.gn@" + Var("chromium_revision")), - # WebRTC deps. - "trunk/third_party/libvpx": - From("trunk/chromium_deps", "src/third_party/libvpx"), - - "trunk/third_party/opus/source": - "http://git.opus-codec.org/opus.git", - - "trunk/build": + "build": Var("chromium_trunk") + "/src/build@" + Var("chromium_revision"), # Needed by common.gypi. - "trunk/google_apis/build": + "google_apis/build": Var("chromium_trunk") + "/src/google_apis/build@" + Var("chromium_revision"), - "trunk/testing/gtest": - From("trunk/chromium_deps", "src/testing/gtest"), + "testing": + Var("chromium_trunk") + "/src/testing@" + Var("chromium_revision"), - "trunk/tools/gyp": - From("trunk/chromium_deps", "src/tools/gyp"), + "testing/gmock": + From("chromium_deps", "src/testing/gmock"), - "trunk/tools/clang": - Var("chromium_trunk") + "/src/tools/clang@" + Var("chromium_revision"), + "testing/gtest": + From("chromium_deps", "src/testing/gtest"), - # Needed by build/common.gypi. 
- "trunk/tools/win/supalink": - Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"), + "third_party/clang_format": + Var("chromium_trunk") + "/src/third_party/clang_format@" + Var("chromium_revision"), - "trunk/third_party/protobuf": - Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"), + "third_party/clang_format/script": + From("chromium_deps", "src/third_party/clang_format/script"), - "trunk/third_party/libjpeg_turbo/": - From("trunk/chromium_deps", "src/third_party/libjpeg_turbo"), - - "trunk/third_party/libjpeg": - Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"), - - "trunk/third_party/yasm": - Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"), - - "trunk/third_party/expat": + "third_party/expat": Var("chromium_trunk") + "/src/third_party/expat@" + Var("chromium_revision"), - "trunk/third_party/yasm/source/patched-yasm": - From("trunk/chromium_deps", "src/third_party/yasm/source/patched-yasm"), + # When rolling gflags, also update deps/third_party/webrtc/webrtc.DEPS/DEPS + # in Chromium's repo. + "third_party/gflags/src": + (Var("googlecode_url") % "gflags") + "/trunk/src@84", - "trunk/third_party/libyuv": - From("trunk/chromium_deps", "src/third_party/libyuv"), + "third_party/icu/": + From("chromium_deps", "src/third_party/icu"), - # libjingle deps. - "trunk/third_party/libjingle/": - File(Var("chromium_trunk") + "/src/third_party/libjingle/libjingle.gyp@" + Var("chromium_revision")), - - "trunk/third_party/libjingle/source": - From("trunk/chromium_deps", "src/third_party/libjingle/source"), - - "trunk/third_party/libjingle/overrides/talk/base": - (Var("googlecode_url") % "libjingle") + "/trunk/talk/base@" + Var("libjingle_revision"), - - "trunk/third_party/libsrtp/": - From("trunk/chromium_deps", "src/third_party/libsrtp"), - - "trunk/third_party/jsoncpp/": + "third_party/jsoncpp/": Var("chromium_trunk") + "/src/third_party/jsoncpp@" + Var("chromium_revision"), - "trunk/third_party/jsoncpp/source": - "http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/jsoncpp@248", + "third_party/jsoncpp/source": + (Var("sourceforge_url") % {"repo": "jsoncpp"}) + "/trunk/jsoncpp@248", + + "third_party/junit/": + (Var("googlecode_url") % "webrtc") + "/deps/third_party/junit@3367", + + "third_party/libjpeg": + Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"), + + "third_party/libjpeg_turbo": + From("chromium_deps", "src/third_party/libjpeg_turbo"), + + "third_party/libsrtp/": + From("chromium_deps", "src/third_party/libsrtp"), + + "third_party/libvpx": + Var("chromium_trunk") + "/deps/third_party/libvpx@248011", + + "third_party/libyuv": + (Var("googlecode_url") % "libyuv") + "/trunk@979", + + "third_party/opus": + Var("chromium_trunk") + "/src/third_party/opus@245176", + + "third_party/opus/src": + Var("chromium_trunk") + "/deps/third_party/opus@239448", + + "third_party/protobuf": + Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"), + + "third_party/sqlite/": + Var("chromium_trunk") + "/src/third_party/sqlite@" + Var("chromium_revision"), + + "third_party/yasm": + Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"), + + "third_party/yasm/source/patched-yasm": + From("chromium_deps", "src/third_party/yasm/source/patched-yasm"), + + "tools/clang": + Var("chromium_trunk") + "/src/tools/clang@" + Var("chromium_revision"), + + "tools/gn": + Var("chromium_trunk") + "/src/tools/gn@" + 
Var("chromium_revision"), + + "tools/gyp": + From("chromium_deps", "src/tools/gyp"), + + "tools/protoc_wrapper": + Var("chromium_trunk") + "/src/tools/protoc_wrapper@" + Var("chromium_revision"), + + "tools/python": + Var("chromium_trunk") + "/src/tools/python@" + Var("chromium_revision"), + + "tools/swarming_client": + From("chromium_deps", "src/tools/swarming_client"), + + "tools/valgrind": + Var("chromium_trunk") + "/src/tools/valgrind@" + Var("chromium_revision"), + + # Needed by build/common.gypi. + "tools/win/supalink": + Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"), + + "net/third_party/nss": + Var("chromium_trunk") + "/src/net/third_party/nss@" + Var("chromium_revision"), + + "third_party/usrsctp/": + Var("chromium_trunk") + "/src/third_party/usrsctp@" + Var("chromium_revision"), + + "third_party/usrsctp/usrsctplib": + (Var("googlecode_url") % "sctp-refimpl") + "/trunk/KERN/usrsctp/usrsctplib@8723", } deps_os = { "win": { - "trunk/third_party/cygwin/": - Var("chromium_trunk") + "/deps/third_party/cygwin@66844", + "third_party/winsdk_samples/src": + (Var("googlecode_url") % "webrtc") + "/deps/third_party/winsdk_samples_v71@3145", - # Used by libjpeg-turbo - "trunk/third_party/yasm/binaries": - From("trunk/chromium_deps", "src/third_party/yasm/binaries"), + # Used by libjpeg-turbo. + "third_party/yasm/binaries": + From("chromium_deps", "src/third_party/yasm/binaries"), + + # NSS, for SSLClientSocketNSS. + "third_party/nss": + From("chromium_deps", "src/third_party/nss"), + + # SyzyASan to make it possible to run tests under ASan on Windows. + "third_party/syzygy/binaries": + From("chromium_deps", "src/third_party/syzygy/binaries"), }, + + "mac": { + # NSS, for SSLClientSocketNSS. + "third_party/nss": + From("chromium_deps", "src/third_party/nss"), + }, + + "ios": { + # NSS, for SSLClientSocketNSS. + "third_party/nss": + From("chromium_deps", "src/third_party/nss"), + + # class-dump utility to generate header files for undocumented SDKs. + "testing/iossim/third_party/class-dump": + From("chromium_deps", "src/testing/iossim/third_party/class-dump"), + + # Helper for running under the simulator. + "testing/iossim": + Var("chromium_trunk") + "/src/testing/iossim@" + Var("chromium_revision"), + }, + "unix": { - "trunk/third_party/gold": - From("trunk/chromium_deps", "src/third_party/gold"), + "third_party/gold": + From("chromium_deps", "src/third_party/gold"), + }, + + "android": { + # Precompiled tools needed for Android test execution. Needed since we can't + # compile them from source in WebRTC since they depend on Chromium's base. + "tools/android": + (Var("googlecode_url") % "webrtc") + "/deps/tools/android@4258", + + "third_party/android_tools": + From("chromium_deps", "src/third_party/android_tools"), + + "third_party/android_testrunner": + Var("chromium_trunk") + "/src/third_party/android_testrunner@" + Var("chromium_revision"), + + "third_party/WebKit/Tools/Scripts": + Var("webkit_trunk") + "/Tools/Scripts@151677", + + "third_party/openssl": + From("chromium_deps", "src/third_party/openssl"), }, } hooks = [ + { + # Copy .gn from temporary place (../chromium_gn) to root_dir. + "name": "copy .gn", + "pattern": ".", + "action": ["python", Var("root_dir") + "/build/cp.py", + Var("root_dir") + "/../chromium_gn/.gn", + Var("root_dir")], + }, + # Pull GN binaries. This needs to be before running GYP below. 
+ { + "name": "gn_win", + "pattern": "tools/gn/bin/win/gn.exe.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=win32", + "--no_auth", + "--bucket", "chromium-gn", + "-s", Var("root_dir") + "/tools/gn/bin/win/gn.exe.sha1", + ], + }, + { + "name": "gn_mac", + "pattern": "tools/gn/bin/mac/gn.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=darwin", + "--no_auth", + "--bucket", "chromium-gn", + "-s", Var("root_dir") + "/tools/gn/bin/mac/gn.sha1", + ], + }, + { + "name": "gn_linux", + "pattern": "tools/gn/bin/linux/gn.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=linux*", + "--no_auth", + "--bucket", "chromium-gn", + "-s", Var("root_dir") + "/tools/gn/bin/linux/gn.sha1", + ], + }, + { + "name": "gn_linux32", + "pattern": "tools/gn/bin/linux/gn32.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=linux*", + "--no_auth", + "--bucket", "chromium-gn", + "-s", Var("root_dir") + "/tools/gn/bin/linux/gn32.sha1", + ], + }, + # Pull clang-format binaries using checked-in hashes. + { + "name": "clang_format_win", + "pattern": "third_party/clang_format/bin/win/clang-format.exe.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=win32", + "--no_auth", + "--bucket", "chromium-clang-format", + "-s", Var("root_dir") + "/third_party/clang_format/bin/win/clang-format.exe.sha1", + ], + }, + { + "name": "clang_format_mac", + "pattern": "third_party/clang_format/bin/mac/clang-format.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=darwin", + "--no_auth", + "--bucket", "chromium-clang-format", + "-s", Var("root_dir") + "/third_party/clang_format/bin/mac/clang-format.sha1", + ], + }, + { + "name": "clang_format_linux", + "pattern": "third_party/clang_format/bin/linux/clang-format.sha1", + "action": [ "download_from_google_storage", + "--no_resume", + "--platform=linux*", + "--no_auth", + "--bucket", "chromium-clang-format", + "-s", Var("root_dir") + "/third_party/clang_format/bin/linux/clang-format.sha1", + ], + }, { # Pull clang on mac. If nothing changed, or on non-mac platforms, this takes # zero seconds to run. If something changed, it downloads a prebuilt clang. "pattern": ".", - "action": ["python", "trunk/tools/clang/scripts/update.py", "--mac-only"], + "action": ["python", Var("root_dir") + "/tools/clang/scripts/update.py", + "--mac-only"], }, { - # Update the cygwin mount on Windows. - # This is necessary to get the correct mapping between e.g. /bin and the - # cygwin path on Windows. Without it we can't run bash scripts in actions. - # Ideally this should be solved in "pylib/gyp/msvs_emulation.py". - "pattern": ".", - "action": ["python", "trunk/build/win/setup_cygwin_mount.py", - "--win-only"], + # Download test resources, i.e. video and audio files from Google Storage. + "pattern": "\\.sha1", + "action": ["download_from_google_storage", + "--directory", + "--recursive", + "--num_threads=10", + "--no_auth", + "--bucket", "chromium-webrtc-resources", + Var("root_dir") + "/resources"], }, { # A change to a .gyp, .gypi, or to GYP itself should run the generator. 
"pattern": ".", - "action": ["python", "trunk/build/gyp_chromium", "--depth=trunk", "trunk/peerconnection_all.gyp"], + "action": ["python", Var("root_dir") + "/webrtc/build/gyp_webrtc", + Var("extra_gyp_flag")], }, ] diff --git a/media/webrtc/trunk/OWNERS b/media/webrtc/trunk/OWNERS index 1527445ec7f3..36d9928e725e 100644 --- a/media/webrtc/trunk/OWNERS +++ b/media/webrtc/trunk/OWNERS @@ -1,5 +1,12 @@ -henrike@webrtc.org -mallinath@webrtc.org -perkj@webrtc.org -wu@webrtc.org -tommi@webrtc.org +andrew@webrtc.org +henrika@webrtc.org +mflodman@webrtc.org +niklas.enbom@webrtc.org +tina.legrand@webrtc.org +tommi@webrtc.org +per-file *.isolate=kjellander@webrtc.org +per-file .gitignore=* +per-file AUTHORS=* +per-file DEPS=* +per-file WATCHLISTS=* +per-file webrtc_examples.gyp=* diff --git a/media/webrtc/trunk/build/all.gyp b/media/webrtc/trunk/build/all.gyp index 92f7eb4f1dba..f402ef2335f7 100644 --- a/media/webrtc/trunk/build/all.gyp +++ b/media/webrtc/trunk/build/all.gyp @@ -20,7 +20,6 @@ '../testing/gmock.gyp:*', '../testing/gtest.gyp:*', '../third_party/bzip2/bzip2.gyp:*', - '../third_party/icu/icu.gyp:*', '../third_party/libxml/libxml.gyp:*', '../third_party/sqlite/sqlite.gyp:*', '../third_party/zlib/zlib.gyp:*', @@ -104,7 +103,7 @@ }], ], }], - ['toolkit_uses_gtk==1', { + ['(toolkit_uses_gtk==1) and (build_with_mozilla==0)', { 'dependencies': [ '../tools/gtk_clipboard_dump/gtk_clipboard_dump.gyp:*', '../tools/xdisplaycheck/xdisplaycheck.gyp:*', diff --git a/media/webrtc/trunk/build/filename_rules.gypi b/media/webrtc/trunk/build/filename_rules.gypi index 500a0d8e1890..844d5c5157c9 100644 --- a/media/webrtc/trunk/build/filename_rules.gypi +++ b/media/webrtc/trunk/build/filename_rules.gypi @@ -66,7 +66,7 @@ ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'], ], }], - ['<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1', { + ['(<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1) and (build_with_mozilla==0)', { 'sources/': [ ['exclude', '_gtk(_browsertest|_unittest)?\\.(h|cc)$'], ['exclude', '(^|/)gtk/'], diff --git a/media/webrtc/trunk/webrtc/build/OWNERS b/media/webrtc/trunk/webrtc/build/OWNERS new file mode 100644 index 000000000000..7e75a0fe0c31 --- /dev/null +++ b/media/webrtc/trunk/webrtc/build/OWNERS @@ -0,0 +1,4 @@ +fischman@webrtc.org +kjellander@webrtc.org +wu@webrtc.org + diff --git a/media/webrtc/trunk/webrtc/build/apk_tests.gyp b/media/webrtc/trunk/webrtc/build/apk_tests.gyp index 33fb7f762fc3..8464d70f7889 100644 --- a/media/webrtc/trunk/webrtc/build/apk_tests.gyp +++ b/media/webrtc/trunk/webrtc/build/apk_tests.gyp @@ -54,21 +54,7 @@ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)common_video_unittests<(SHARED_LIB_SUFFIX)', }, 'dependencies': [ - '<(webrtc_root)/common_video/common_video.gyp:common_video_unittests', - ], - 'includes': [ - '../../../build/apk_test.gypi', - ], - }, - { - 'target_name': 'metrics_unittests_apk', - 'type': 'none', - 'variables': { - 'test_suite_name': 'metrics_unittests', - 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)metrics_unittests<(SHARED_LIB_SUFFIX)', - }, - 'dependencies': [ - '<(webrtc_root)/test/metrics.gyp:metrics_unittests', + '<(webrtc_root)/common_video/common_video_unittests.gyp:common_video_unittests', ], 'includes': [ '../../../build/apk_test.gypi', @@ -173,6 +159,20 @@ ], }, { + 'target_name': 'video_engine_tests_apk', + 'type': 'none', + 'variables': { + 'test_suite_name': 'video_engine_tests', + 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_engine_tests<(SHARED_LIB_SUFFIX)', + }, + 
'dependencies': [ + '<(webrtc_root)/webrtc.gyp:video_engine_tests', + ], + 'includes': [ + '../../../build/apk_test.gypi', + ], + }, + { 'target_name': 'voice_engine_unittests_apk', 'type': 'none', 'variables': { @@ -185,7 +185,21 @@ 'includes': [ '../../../build/apk_test.gypi', ], - }, + }, + { + 'target_name': 'webrtc_perf_tests_apk', + 'type': 'none', + 'variables': { + 'test_suite_name': 'webrtc_perf_tests', + 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)webrtc_perf_tests<(SHARED_LIB_SUFFIX)', + }, + 'dependencies': [ + '<(webrtc_root)/webrtc.gyp:webrtc_perf_tests', + ], + 'includes': [ + '../../../build/apk_test.gypi', + ], + }, ], } diff --git a/media/webrtc/trunk/webrtc/build/apk_tests_noop.gyp b/media/webrtc/trunk/webrtc/build/apk_tests_noop.gyp index a86b159b2c12..daf86e34202e 100644 --- a/media/webrtc/trunk/webrtc/build/apk_tests_noop.gyp +++ b/media/webrtc/trunk/webrtc/build/apk_tests_noop.gyp @@ -21,10 +21,6 @@ 'target_name': 'common_video_unittests_apk', 'type': 'none', }, - { - 'target_name': 'metrics_unittests', - 'type': 'none', - }, { 'target_name': 'modules_tests_apk', 'type': 'none', @@ -53,9 +49,17 @@ 'target_name': 'video_engine_core_unittests_apk', 'type': 'none', }, + { + 'target_name': 'video_engine_tests_apk', + 'type': 'none', + }, { 'target_name': 'voice_engine_unittests_apk', 'type': 'none', }, + { + 'target_name': 'webrtc_perf_tests_apk', + 'type': 'none', + }, ], } diff --git a/media/webrtc/trunk/webrtc/build/common.gypi b/media/webrtc/trunk/webrtc/build/common.gypi index 102dcf7e8091..3ddb7e48f35f 100644 --- a/media/webrtc/trunk/webrtc/build/common.gypi +++ b/media/webrtc/trunk/webrtc/build/common.gypi @@ -14,8 +14,7 @@ 'variables': { 'variables': { 'variables': { - # This will be set to zero in the supplement.gypi triggered by a - # gclient hook in the standalone build. 
+ # This will already be set to zero by supplement.gypi 'build_with_chromium%': 1, }, 'build_with_chromium%': '<(build_with_chromium)', @@ -25,14 +24,14 @@ 'build_with_libjingle': 1, 'webrtc_root%': '<(DEPTH)/third_party/webrtc', 'apk_tests_path%': '<(DEPTH)/third_party/webrtc/build/apk_tests.gyp', - 'import_isolate_path%': '<(DEPTH)/third_party/webrtc/build/import_isolate_chromium.gyp', 'modules_java_gyp_path%': '<(DEPTH)/third_party/webrtc/modules/modules_java_chromium.gyp', + 'gen_core_neon_offsets_gyp%': '<(DEPTH)/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp', }, { 'build_with_libjingle%': 0, 'webrtc_root%': '<(DEPTH)/webrtc', 'apk_tests_path%': '<(DEPTH)/webrtc/build/apk_test_noop.gyp', - 'import_isolate_path%': '<(DEPTH)/webrtc/build/import_isolate_webrtc.gyp', 'modules_java_gyp_path%': '<(DEPTH)/webrtc/modules/modules_java.gyp', + 'gen_core_neon_offsets_gyp%':'<(DEPTH)/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp', }], ], }, @@ -40,9 +39,8 @@ 'build_with_libjingle%': '<(build_with_libjingle)', 'webrtc_root%': '<(webrtc_root)', 'apk_tests_path%': '<(apk_tests_path)', - 'import_isolate_path%': '<(import_isolate_path)', 'modules_java_gyp_path%': '<(modules_java_gyp_path)', - + 'gen_core_neon_offsets_gyp%': '<(gen_core_neon_offsets_gyp)', 'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8', 'webrtc_h264_dir%': '<(webrtc_root)/modules/video_coding/codecs/h264', 'rbe_components_path%': '<(webrtc_root)/modules/remote_bitrate_estimator', @@ -57,8 +55,8 @@ 'build_with_libjingle%': '<(build_with_libjingle)', 'webrtc_root%': '<(webrtc_root)', 'apk_tests_path%': '<(apk_tests_path)', - 'import_isolate_path%': '<(import_isolate_path)', 'modules_java_gyp_path%': '<(modules_java_gyp_path)', + 'gen_core_neon_offsets_gyp%': '<(gen_core_neon_offsets_gyp)', 'webrtc_vp8_dir%': '<(webrtc_vp8_dir)', 'webrtc_h264_dir%': '<(webrtc_h264_dir)', @@ -79,6 +77,9 @@ # third party code will still have the reduced warning settings. 'chromium_code': 1, + # Remote bitrate estimator logging/plotting. + 'enable_bwe_test_logging%': 0, + # Adds video support to dependencies shared by voice and video engine. # This should normally be enabled; the intended use is to disable only # when building voice engine exclusively. @@ -109,6 +110,7 @@ 'mips_arch_variant%': 'mips32r1', 'mips_dsp_rev%': 0, 'mips_fpu%' : 1, + 'enable_android_opensl%': 1, 'conditions': [ ['build_with_chromium==1', { @@ -125,11 +127,10 @@ # Exclude internal video render module in Chromium build. 'include_internal_video_render%': 0, - # Include ndk cpu features in Chromium build. - 'include_ndk_cpu_features%': 1, - # lazily allocate the ~4MB of trace message buffers if set 'enable_lazy_trace_alloc%': 0, + + 'include_ndk_cpu_features%': 0, }, { # Settings for the standalone (not-in-Chromium) build. # TODO(andrew): For now, disable the Chrome plugins, which causes a # flood of chromium-style warnings. Investigate enabling them: @@ -144,14 +145,10 @@ }], ['build_with_libjingle==1', { 'include_tests%': 0, - 'enable_tracing%': 0, - 'enable_android_opensl%': 0, + 'restrict_webrtc_logging%': 1, }, { 'include_tests%': 1, - 'enable_tracing%': 1, - # Switch between Android audio device OpenSL ES implementation - # and Java Implementation - 'enable_android_opensl%': 0, + 'restrict_webrtc_logging%': 0, }], ['OS=="linux"', { 'include_alsa_audio%': 1, @@ -180,8 +177,6 @@ }, 'target_defaults': { 'include_dirs': [ - # TODO(andrew): Remove '..' when we've added webrtc/ to include paths. 
-    '..',
     # Allow includes to be prefixed with webrtc/ in case it is not an
     # immediate subdirectory of <(DEPTH).
     '../..',
@@ -189,20 +184,14 @@
     # use full paths (e.g. headers inside testing/ or third_party/).
     '<(DEPTH)',
   ],
-  'defines': [
-    # TODO(leozwang): Run this as a gclient hook rather than at build-time:
-    # http://code.google.com/p/webrtc/issues/detail?id=687
-    'WEBRTC_SVNREVISION="Unavailable_issue687"',
-    #'WEBRTC_SVNREVISION="
[...]
diff --git a/media/webrtc/trunk/webrtc/video_engine/new_include/call.h b/media/webrtc/trunk/webrtc/call.h
--- a/media/webrtc/trunk/webrtc/video_engine/new_include/call.h
+++ b/media/webrtc/trunk/webrtc/call.h
 #include <string>
 #include <vector>

 #include "webrtc/common_types.h"
-#include "webrtc/video_engine/new_include/video_receive_stream.h"
-#include "webrtc/video_engine/new_include/video_send_stream.h"
+#include "webrtc/video_receive_stream.h"
+#include "webrtc/video_send_stream.h"

 namespace webrtc {

@@ -31,6 +31,19 @@ class PacketReceiver {
   virtual ~PacketReceiver() {}
 };

+// Callback interface for reporting when a system overuse is detected.
+// The detection is based on the jitter of incoming captured frames.
+class OveruseCallback {
+ public:
+  // Called as soon as an overuse is detected.
+  virtual void OnOveruse() = 0;
+  // Called periodically when the system is not overused any longer.
+  virtual void OnNormalUse() = 0;
+
+ protected:
+  virtual ~OveruseCallback() {}
+};
+
 // A Call instance can contain several send and/or receive streams. All streams
 // are assumed to have the same remote endpoint and will share bitrate estimates
 // etc.
@@ -38,41 +51,48 @@ class Call {
  public:
   struct Config {
     explicit Config(newapi::Transport* send_transport)
-        : send_transport(send_transport),
-          overuse_detection(false),
+        : webrtc_config(NULL),
+          send_transport(send_transport),
           voice_engine(NULL),
           trace_callback(NULL),
-          trace_filter(kTraceNone) {}
+          trace_filter(kTraceDefault),
+          overuse_callback(NULL) {}
+
+    webrtc::Config* webrtc_config;

     newapi::Transport* send_transport;
-    bool overuse_detection;

     // VoiceEngine used for audio/video synchronization for this Call.
     VoiceEngine* voice_engine;

     TraceCallback* trace_callback;
     uint32_t trace_filter;
+
+    // Callback for overuse and normal usage based on the jitter of incoming
+    // captured frames. 'NULL' disables the callback.
+    OveruseCallback* overuse_callback;
   };

   static Call* Create(const Call::Config& config);

+  static Call* Create(const Call::Config& config,
+                      const webrtc::Config& webrtc_config);
+
   virtual std::vector<VideoCodec> GetVideoCodecs() = 0;

   virtual VideoSendStream::Config GetDefaultSendConfig() = 0;

-  virtual VideoSendStream* CreateSendStream(
+  virtual VideoSendStream* CreateVideoSendStream(
       const VideoSendStream::Config& config) = 0;

-  // Returns the internal state of the send stream, for resume sending with a
-  // new stream with different settings.
-  // Note: Only the last returned send-stream state is valid.
-  virtual SendStreamState* DestroySendStream(VideoSendStream* send_stream) = 0;
+  virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0;

   virtual VideoReceiveStream::Config GetDefaultReceiveConfig() = 0;

-  virtual VideoReceiveStream* CreateReceiveStream(
+  virtual VideoReceiveStream* CreateVideoReceiveStream(
       const VideoReceiveStream::Config& config) = 0;

-  virtual void DestroyReceiveStream(VideoReceiveStream* receive_stream) = 0;
+  virtual void DestroyVideoReceiveStream(
+      VideoReceiveStream* receive_stream) = 0;

   // All received RTP and RTCP packets for the call should be inserted to this
   // PacketReceiver.
The PacketReceiver pointer is valid as long as the @@ -91,4 +111,4 @@ class Call { }; } // namespace webrtc -#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CALL_H_ +#endif // WEBRTC_CALL_H_ diff --git a/media/webrtc/trunk/webrtc/common_audio/common_audio.gyp b/media/webrtc/trunk/webrtc/common_audio/common_audio.gyp index 605e0ce2392e..af0391817865 100644 --- a/media/webrtc/trunk/webrtc/common_audio/common_audio.gyp +++ b/media/webrtc/trunk/webrtc/common_audio/common_audio.gyp @@ -103,7 +103,7 @@ 'signal_processing/spl_sqrt_floor.c', ], 'conditions': [ - ['armv7==1', { + ['arm_version==7', { 'dependencies': ['common_audio_neon',], 'sources': [ 'signal_processing/filter_ar_fast_q12_armv7.S', @@ -162,7 +162,7 @@ }, ], # targets }], - ['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', { + ['(target_arch=="arm" and arm_version==7) or target_arch=="armv7"', { 'targets': [ { 'target_name': 'common_audio_neon', @@ -237,10 +237,10 @@ 'target_name': 'common_audio_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'common_audio_unittests', ], 'includes': [ + '../build/isolate.gypi', 'common_audio_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/common_audio/common_audio_unittests.isolate b/media/webrtc/trunk/webrtc/common_audio/common_audio_unittests.isolate index e6b1db6e4554..49f1e984b0e8 100644 --- a/media/webrtc/trunk/webrtc/common_audio/common_audio_unittests.isolate +++ b/media/webrtc/trunk/webrtc/common_audio/common_audio_unittests.isolate @@ -21,16 +21,15 @@ 'variables': { 'command': [ '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/common_audio_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../testing/test_env.py', - '../../tools/swarm_client/run_isolated.py', - '../../tools/swarm_client/googletest/run_test_cases.py', - '../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/common_audio_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/common_audio/resampler/push_resampler.cc b/media/webrtc/trunk/webrtc/common_audio/resampler/push_resampler.cc index 81206703963c..e947447bb6d8 100644 --- a/media/webrtc/trunk/webrtc/common_audio/resampler/push_resampler.cc +++ b/media/webrtc/trunk/webrtc/common_audio/resampler/push_resampler.cc @@ -18,9 +18,7 @@ namespace webrtc { PushResampler::PushResampler() - : sinc_resampler_(NULL), - sinc_resampler_right_(NULL), - src_sample_rate_hz_(0), + : src_sample_rate_hz_(0), dst_sample_rate_hz_(0), num_channels_(0), src_left_(NULL), diff --git a/media/webrtc/trunk/webrtc/common_audio/resampler/push_sinc_resampler.cc b/media/webrtc/trunk/webrtc/common_audio/resampler/push_sinc_resampler.cc index 886d7633c06c..1fb72dc76cfc 100644 --- a/media/webrtc/trunk/webrtc/common_audio/resampler/push_sinc_resampler.cc +++ b/media/webrtc/trunk/webrtc/common_audio/resampler/push_sinc_resampler.cc @@ -17,14 +17,13 @@ namespace webrtc { PushSincResampler::PushSincResampler(int source_frames, int destination_frames) - : resampler_(NULL), + : resampler_(new SincResampler(source_frames * 1.0 / destination_frames, + source_frames, this)), float_buffer_(new float[destination_frames]), source_ptr_(NULL), destination_frames_(destination_frames), first_pass_(true), source_available_(0) { - resampler_.reset(new SincResampler(source_frames * 1.0 / destination_frames, - source_frames, this)); } 
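// Editorial sketch, not part of this patch: the constructor change above
// swaps a default-constructed scoped_ptr plus a reset() call in the body for
// direct construction in the member-initializer list, i.e. the pattern
//
//   PushSincResampler::PushSincResampler(int src, int dst)
//       : resampler_(new SincResampler(src * 1.0 / dst, src, this)),
//         ... {}
//
// This removes the transient NULL state of resampler_ and keeps the member
// valid from the first statement of the constructor body onwards.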
PushSincResampler::~PushSincResampler() { diff --git a/media/webrtc/trunk/webrtc/common_audio/signal_processing/include/signal_processing_library.h b/media/webrtc/trunk/webrtc/common_audio/signal_processing/include/signal_processing_library.h index c567beba5eab..60c60bc96398 100644 --- a/media/webrtc/trunk/webrtc/common_audio/signal_processing/include/signal_processing_library.h +++ b/media/webrtc/trunk/webrtc/common_audio/signal_processing/include/signal_processing_library.h @@ -143,9 +143,6 @@ #define WEBRTC_SPL_RSHIFT_U32(x, c) ((uint32_t)(x) >> (c)) #define WEBRTC_SPL_LSHIFT_U32(x, c) ((uint32_t)(x) << (c)) -#define WEBRTC_SPL_VNEW(t, n) (t *) malloc (sizeof (t) * (n)) -#define WEBRTC_SPL_FREE free - #define WEBRTC_SPL_RAND(a) \ ((int16_t)(WEBRTC_SPL_MUL_16_16_RSFT((a), 18816, 7) & 0x00007fff)) @@ -996,12 +993,14 @@ void WebRtcSpl_UpsampleBy2(const int16_t* in, int16_t len, * END OF RESAMPLING FUNCTIONS ************************************************************/ void WebRtcSpl_AnalysisQMF(const int16_t* in_data, + int in_data_length, int16_t* low_band, int16_t* high_band, int32_t* filter_state1, int32_t* filter_state2); void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band, + int band_length, int16_t* out_data, int32_t* filter_state1, int32_t* filter_state2); diff --git a/media/webrtc/trunk/webrtc/common_audio/signal_processing/splitting_filter.c b/media/webrtc/trunk/webrtc/common_audio/signal_processing/splitting_filter.c index cf6ec9d7b166..dbda042277c0 100644 --- a/media/webrtc/trunk/webrtc/common_audio/signal_processing/splitting_filter.c +++ b/media/webrtc/trunk/webrtc/common_audio/signal_processing/splitting_filter.c @@ -15,10 +15,12 @@ #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" -// Number of samples in a low/high-band frame. +#include + +// Maximum number of samples in a low/high-band frame. enum { - kBandFrameLength = 160 + kMaxBandFrameLength = 240 // 10 ms at 48 kHz. }; // QMF filter coefficients in Q16. @@ -116,34 +118,37 @@ void WebRtcSpl_AllPassQMF(int32_t* in_data, int16_t data_length, filter_state[5] = out_data[data_length - 1]; // y[N-1], becomes y[-1] next time } -void WebRtcSpl_AnalysisQMF(const int16_t* in_data, int16_t* low_band, - int16_t* high_band, int32_t* filter_state1, - int32_t* filter_state2) +void WebRtcSpl_AnalysisQMF(const int16_t* in_data, int in_data_length, + int16_t* low_band, int16_t* high_band, + int32_t* filter_state1, int32_t* filter_state2) { int16_t i; int16_t k; int32_t tmp; - int32_t half_in1[kBandFrameLength]; - int32_t half_in2[kBandFrameLength]; - int32_t filter1[kBandFrameLength]; - int32_t filter2[kBandFrameLength]; + int32_t half_in1[kMaxBandFrameLength]; + int32_t half_in2[kMaxBandFrameLength]; + int32_t filter1[kMaxBandFrameLength]; + int32_t filter2[kMaxBandFrameLength]; + const int band_length = in_data_length / 2; + assert(in_data_length % 2 == 0); + assert(band_length <= kMaxBandFrameLength); // Split even and odd samples. Also shift them to Q10. - for (i = 0, k = 0; i < kBandFrameLength; i++, k += 2) + for (i = 0, k = 0; i < band_length; i++, k += 2) { half_in2[i] = WEBRTC_SPL_LSHIFT_W32((int32_t)in_data[k], 10); half_in1[i] = WEBRTC_SPL_LSHIFT_W32((int32_t)in_data[k + 1], 10); } // All pass filter even and odd samples, independently. 
- WebRtcSpl_AllPassQMF(half_in1, kBandFrameLength, filter1, WebRtcSpl_kAllPassFilter1, - filter_state1); - WebRtcSpl_AllPassQMF(half_in2, kBandFrameLength, filter2, WebRtcSpl_kAllPassFilter2, - filter_state2); + WebRtcSpl_AllPassQMF(half_in1, band_length, filter1, + WebRtcSpl_kAllPassFilter1, filter_state1); + WebRtcSpl_AllPassQMF(half_in2, band_length, filter2, + WebRtcSpl_kAllPassFilter2, filter_state2); // Take the sum and difference of filtered version of odd and even // branches to get upper & lower band. - for (i = 0; i < kBandFrameLength; i++) + for (i = 0; i < band_length; i++) { tmp = filter1[i] + filter2[i] + 1024; tmp = WEBRTC_SPL_RSHIFT_W32(tmp, 11); @@ -156,20 +161,21 @@ void WebRtcSpl_AnalysisQMF(const int16_t* in_data, int16_t* low_band, } void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band, - int16_t* out_data, int32_t* filter_state1, - int32_t* filter_state2) + int band_length, int16_t* out_data, + int32_t* filter_state1, int32_t* filter_state2) { int32_t tmp; - int32_t half_in1[kBandFrameLength]; - int32_t half_in2[kBandFrameLength]; - int32_t filter1[kBandFrameLength]; - int32_t filter2[kBandFrameLength]; + int32_t half_in1[kMaxBandFrameLength]; + int32_t half_in2[kMaxBandFrameLength]; + int32_t filter1[kMaxBandFrameLength]; + int32_t filter2[kMaxBandFrameLength]; int16_t i; int16_t k; + assert(band_length <= kMaxBandFrameLength); // Obtain the sum and difference channels out of upper and lower-band channels. // Also shift to Q10 domain. - for (i = 0; i < kBandFrameLength; i++) + for (i = 0; i < band_length; i++) { tmp = (int32_t)low_band[i] + (int32_t)high_band[i]; half_in1[i] = WEBRTC_SPL_LSHIFT_W32(tmp, 10); @@ -178,15 +184,15 @@ void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band, } // all-pass filter the sum and difference channels - WebRtcSpl_AllPassQMF(half_in1, kBandFrameLength, filter1, WebRtcSpl_kAllPassFilter2, - filter_state1); - WebRtcSpl_AllPassQMF(half_in2, kBandFrameLength, filter2, WebRtcSpl_kAllPassFilter1, - filter_state2); + WebRtcSpl_AllPassQMF(half_in1, band_length, filter1, + WebRtcSpl_kAllPassFilter2, filter_state1); + WebRtcSpl_AllPassQMF(half_in2, band_length, filter2, + WebRtcSpl_kAllPassFilter1, filter_state2); // The filtered signals are even and odd samples of the output. Combine // them. The signals are Q10 should shift them back to Q0 and take care of // saturation. 
- for (i = 0, k = 0; i < kBandFrameLength; i++) + for (i = 0, k = 0; i < band_length; i++) { tmp = WEBRTC_SPL_RSHIFT_W32(filter2[i] + 512, 10); out_data[k++] = WebRtcSpl_SatW32ToW16(tmp); diff --git a/media/webrtc/trunk/webrtc/common_types.h b/media/webrtc/trunk/webrtc/common_types.h index 58b402d2cb35..fe8872b80379 100644 --- a/media/webrtc/trunk/webrtc/common_types.h +++ b/media/webrtc/trunk/webrtc/common_types.h @@ -70,27 +70,28 @@ protected: enum TraceModule { - kTraceUndefined = 0, + kTraceUndefined = 0, // not a module, triggered from the engine code - kTraceVoice = 0x0001, + kTraceVoice = 0x0001, // not a module, triggered from the engine code - kTraceVideo = 0x0002, + kTraceVideo = 0x0002, // not a module, triggered from the utility code - kTraceUtility = 0x0003, - kTraceRtpRtcp = 0x0004, - kTraceTransport = 0x0005, - kTraceSrtp = 0x0006, - kTraceAudioCoding = 0x0007, - kTraceAudioMixerServer = 0x0008, - kTraceAudioMixerClient = 0x0009, - kTraceFile = 0x000a, - kTraceAudioProcessing = 0x000b, - kTraceVideoCoding = 0x0010, - kTraceVideoMixer = 0x0011, - kTraceAudioDevice = 0x0012, - kTraceVideoRenderer = 0x0014, - kTraceVideoCapture = 0x0015, - kTraceVideoPreocessing = 0x0016 + kTraceUtility = 0x0003, + kTraceRtpRtcp = 0x0004, + kTraceTransport = 0x0005, + kTraceSrtp = 0x0006, + kTraceAudioCoding = 0x0007, + kTraceAudioMixerServer = 0x0008, + kTraceAudioMixerClient = 0x0009, + kTraceFile = 0x000a, + kTraceAudioProcessing = 0x000b, + kTraceVideoCoding = 0x0010, + kTraceVideoMixer = 0x0011, + kTraceAudioDevice = 0x0012, + kTraceVideoRenderer = 0x0014, + kTraceVideoCapture = 0x0015, + kTraceVideoPreocessing = 0x0016, + kTraceRemoteBitrateEstimator = 0x0017, }; enum TraceLevel @@ -139,7 +140,6 @@ enum FileFormats kFileFormatPcm32kHzFile = 9 }; - enum ProcessingTypes { kPlaybackPerChannel = 0, @@ -149,6 +149,15 @@ enum ProcessingTypes kRecordingPreprocessing }; +enum FrameType +{ + kFrameEmpty = 0, + kAudioFrameSpeech = 1, + kAudioFrameCN = 2, + kVideoFrameKey = 3, // independent frame + kVideoFrameDelta = 4, // depends on the previus frame +}; + // Interface for encrypting and decrypting regular data and rtp/rtcp packets. // Implement this interface if you wish to provide an encryption scheme to // the voice or video engines. @@ -226,6 +235,82 @@ protected: Transport() {} }; +// Statistics for an RTCP channel +struct RtcpStatistics { + RtcpStatistics() + : fraction_lost(0), + cumulative_lost(0), + extended_max_sequence_number(0), + jitter(0) {} + + uint8_t fraction_lost; + uint32_t cumulative_lost; + uint32_t extended_max_sequence_number; + uint32_t jitter; +}; + +// Callback, called whenever a new rtcp report block is transmitted. +class RtcpStatisticsCallback { + public: + virtual ~RtcpStatisticsCallback() {} + + virtual void StatisticsUpdated(const RtcpStatistics& statistics, + uint32_t ssrc) = 0; +}; + +// Data usage statistics for a (rtp) stream +struct StreamDataCounters { + StreamDataCounters() + : bytes(0), + header_bytes(0), + padding_bytes(0), + packets(0), + retransmitted_packets(0), + fec_packets(0) {} + + uint32_t bytes; // Payload bytes, excluding RTP headers and padding. + uint32_t header_bytes; // Number of bytes used by RTP headers. + uint32_t padding_bytes; // Number of padding bytes. + uint32_t packets; // Number of packets. + uint32_t retransmitted_packets; // Number of retransmitted packets. + uint32_t fec_packets; // Number of redundancy packets. +}; + +// Callback, called whenever byte/packet counts have been updated. 
+class StreamDataCountersCallback { + public: + virtual ~StreamDataCountersCallback() {} + + virtual void DataCountersUpdated(const StreamDataCounters& counters, + uint32_t ssrc) = 0; +}; + +// Rate statistics for a stream +struct BitrateStatistics { + BitrateStatistics() : bitrate_bps(0), packet_rate(0), timestamp_ms(0) {} + + uint32_t bitrate_bps; // Bitrate in bits per second. + uint32_t packet_rate; // Packet rate in packets per second. + uint64_t timestamp_ms; // Ntp timestamp in ms at time of rate estimation. +}; + +// Callback, used to notify an observer whenever new rates have been estimated. +class BitrateStatisticsObserver { + public: + virtual ~BitrateStatisticsObserver() {} + + virtual void Notify(const BitrateStatistics& stats, uint32_t ssrc) = 0; +}; + +// Callback, used to notify an observer whenever frame counts have been updated +class FrameCountObserver { + public: + virtual ~FrameCountObserver() {} + virtual void FrameCountUpdated(FrameType frame_type, + uint32_t frame_count, + const unsigned int ssrc) = 0; +}; + // ================================================================== // Voice specific types // ================================================================== @@ -241,17 +326,6 @@ struct CodecInst int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file! }; -enum FrameType -{ - kFrameEmpty = 0, - kAudioFrameSpeech = 1, - kAudioFrameCN = 2, - kVideoFrameKey = 3, // independent frame - kVideoFrameDelta = 4, // depends on the previus frame - kVideoFrameGolden = 5, // depends on a old known previus frame - kVideoFrameAltRef = 6 -}; - // RTP enum {kRtpCsrcSize = 15}; // RFC 3550 page 13 @@ -310,6 +384,25 @@ struct NetworkStatistics // NETEQ statistics int addedSamples; }; +// Statistics for calls to AudioCodingModule::PlayoutData10Ms(). +struct AudioDecodingCallStats { + AudioDecodingCallStats() + : calls_to_silence_generator(0), + calls_to_neteq(0), + decoded_normal(0), + decoded_plc(0), + decoded_cng(0), + decoded_plc_cng(0) {} + + int calls_to_silence_generator; // Number of calls where silence generated, + // and NetEq was disengaged from decoding. + int calls_to_neteq; // Number of calls to NetEq. + int decoded_normal; // Number of calls where audio RTP packet decoded. + int decoded_plc; // Number of calls resulted in PLC. + int decoded_cng; // Number of calls where comfort noise generated due to DTX. + int decoded_plc_cng; // Number of calls resulted where PLC faded to CNG. +}; + typedef struct { int min; // minumum @@ -522,13 +615,9 @@ struct VideoCodecH264 size_t ppsLen; }; -// Unknown specific -struct VideoCodecGeneric -{ -}; - // Video codec types -enum VideoCodecType { +enum VideoCodecType +{ kVideoCodecVP8, kVideoCodecH264, kVideoCodecI420, @@ -542,7 +631,6 @@ union VideoCodecUnion { VideoCodecVP8 VP8; VideoCodecH264 H264; - VideoCodecGeneric Generic; }; @@ -638,6 +726,22 @@ public: virtual ~CPULoadStateCallbackInvoker() {}; }; +// This structure will have the information about when packet is actually +// received by socket. +struct PacketTime { + PacketTime() : timestamp(-1), max_error_us(-1) {} + PacketTime(int64_t timestamp, int64_t max_error_us) + : timestamp(timestamp), max_error_us(max_error_us) { + } + + int64_t timestamp; // Receive time after socket delivers the data. + int64_t max_error_us; // Earliest possible time the data could have arrived, + // indicating the potential error in the |timestamp| + // value,in case the system is busy. + // For example, the time of the last select() call. 
+ // If unknown, this value will be set to zero. +}; + } // namespace webrtc #endif // WEBRTC_COMMON_TYPES_H_ diff --git a/media/webrtc/trunk/webrtc/common_video/common_video.gyp b/media/webrtc/trunk/webrtc/common_video/common_video.gyp index 2ba168101985..97cb94197e2a 100644 --- a/media/webrtc/trunk/webrtc/common_video/common_video.gyp +++ b/media/webrtc/trunk/webrtc/common_video/common_video.gyp @@ -49,84 +49,4 @@ ], }, ], # targets - 'conditions': [ - ['include_tests==1', { - 'targets': [ - { - 'target_name': 'frame_generator', - 'type': 'static_library', - 'sources': [ - 'test/frame_generator.h', - 'test/frame_generator.cc', - ], - 'dependencies': [ - 'common_video', - ], - }, - { - 'target_name': 'common_video_unittests', - 'type': '<(gtest_target_type)', - 'dependencies': [ - 'common_video', - '<(DEPTH)/testing/gtest.gyp:gtest', - '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', - '<(webrtc_root)/test/test.gyp:test_support_main', - ], - 'sources': [ - 'i420_video_frame_unittest.cc', - 'libyuv/libyuv_unittest.cc', - 'libyuv/scaler_unittest.cc', - 'plane_unittest.cc', - 'texture_video_frame_unittest.cc' - ], - # Disable warnings to enable Win64 build, issue 1323. - 'msvs_disabled_warnings': [ - 4267, # size_t to int truncation. - ], - 'conditions': [ - # TODO(henrike): remove build_with_chromium==1 when the bots are - # using Chromium's buildbots. - ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { - 'dependencies': [ - '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code', - ], - }], - ], - }, - ], # targets - 'conditions': [ - # TODO(henrike): remove build_with_chromium==1 when the bots are using - # Chromium's buildbots. - ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { - 'targets': [ - { - 'target_name': 'common_video_unittests_apk_target', - 'type': 'none', - 'dependencies': [ - '<(apk_tests_path):common_video_unittests_apk', - ], - }, - ], - }], - ['test_isolation_mode != "noop"', { - 'targets': [ - { - 'target_name': 'common_video_unittests_run', - 'type': 'none', - 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', - 'common_video_unittests', - ], - 'includes': [ - 'common_video_unittests.isolate', - ], - 'sources': [ - 'common_video_unittests.isolate', - ], - }, - ], - }], - ], - }], # include_tests - ], } diff --git a/media/webrtc/trunk/webrtc/common_video/common_video_unittests.gyp b/media/webrtc/trunk/webrtc/common_video/common_video_unittests.gyp new file mode 100644 index 000000000000..9523361cda88 --- /dev/null +++ b/media/webrtc/trunk/webrtc/common_video/common_video_unittests.gyp @@ -0,0 +1,76 @@ +# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
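# Editorial note, not part of this patch: this new standalone .gyp file picks
# up the test targets that the hunk above removes from common_video.gyp, so
# dependents can reference common_video_unittests without pulling in the
# include_tests conditional machinery. The isolate runner below also includes
# ../build/isolate.gypi directly instead of depending on the old
# import_isolate_gypi target.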
+ +{ + 'includes': ['../build/common.gypi'], + 'targets': [ + { + 'target_name': 'common_video_unittests', + 'type': '<(gtest_target_type)', + 'dependencies': [ + '<(webrtc_root)/common_video/common_video.gyp:common_video', + '<(DEPTH)/testing/gtest.gyp:gtest', + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + '<(webrtc_root)/test/test.gyp:test_support_main', + ], + 'sources': [ + 'i420_video_frame_unittest.cc', + 'libyuv/libyuv_unittest.cc', + 'libyuv/scaler_unittest.cc', + 'plane_unittest.cc', + 'texture_video_frame_unittest.cc' + ], + # Disable warnings to enable Win64 build, issue 1323. + 'msvs_disabled_warnings': [ + 4267, # size_t to int truncation. + ], + 'conditions': [ + # TODO(henrike): remove build_with_chromium==1 when the bots are + # using Chromium's buildbots. + ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { + 'dependencies': [ + '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code', + ], + }], + ], + }, + ], # targets + 'conditions': [ + # TODO(henrike): remove build_with_chromium==1 when the bots are using + # Chromium's buildbots. + ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { + 'targets': [ + { + 'target_name': 'common_video_unittests_apk_target', + 'type': 'none', + 'dependencies': [ + '<(apk_tests_path):common_video_unittests_apk', + ], + }, + ], + }], + ['test_isolation_mode != "noop"', { + 'targets': [ + { + 'target_name': 'common_video_unittests_run', + 'type': 'none', + 'dependencies': [ + 'common_video_unittests', + ], + 'includes': [ + '../build/isolate.gypi', + 'common_video_unittests.isolate', + ], + 'sources': [ + 'common_video_unittests.isolate', + ], + }, + ], + }], + ], +} diff --git a/media/webrtc/trunk/webrtc/common_video/common_video_unittests.isolate b/media/webrtc/trunk/webrtc/common_video/common_video_unittests.isolate index e96645a87c62..a95bd4144857 100644 --- a/media/webrtc/trunk/webrtc/common_video/common_video_unittests.isolate +++ b/media/webrtc/trunk/webrtc/common_video/common_video_unittests.isolate @@ -21,18 +21,17 @@ 'variables': { 'command': [ '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/common_video_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../DEPS', '../../resources/foreman_cif.yuv', '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', - '../../tools/swarm_client/run_isolated.py', - '../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/common_video_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/video_engine/new_include/config.h b/media/webrtc/trunk/webrtc/config.h similarity index 75% rename from media/webrtc/trunk/webrtc/video_engine/new_include/config.h rename to media/webrtc/trunk/webrtc/config.h index d19c8d971c1d..105d9a546f88 100644 --- a/media/webrtc/trunk/webrtc/video_engine/new_include/config.h +++ b/media/webrtc/trunk/webrtc/config.h @@ -8,12 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ +// TODO(pbos): Move Config from common.h to here. 
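// Editorial sketch, not part of this patch: the RtpExtension struct further
// down gains named constants for the two supported RTP header extensions. A
// hypothetical caller building a stream config might write:
//
//   std::vector<RtpExtension> extensions;
//   extensions.push_back(RtpExtension(RtpExtension::kTOffset, 1));
//
// where the extensions vector is assumed to live on the send/receive stream
// config types, which are not shown in this hunk.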
+ #ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CONFIG_H_ #define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CONFIG_H_ #include #include +#include "webrtc/common_types.h" +#include "webrtc/typedefs.h" + namespace webrtc { struct RtpStatistics { @@ -29,14 +34,14 @@ struct RtpStatistics { std::string c_name; }; -namespace newapi { -// RTCP mode to use. Compound mode is described by RFC 4585 and reduced-size -// RTCP mode is described by RFC 5506. -enum RtcpMode { - kRtcpCompound, - kRtcpReducedSize +struct StreamStats { + StreamStats() : key_frames(0), delta_frames(0), bitrate_bps(0) {} + uint32_t key_frames; + uint32_t delta_frames; + int32_t bitrate_bps; + StreamDataCounters rtp_stats; + RtcpStatistics rtcp_stats; }; -} // namespace newapi // Settings for NACK, see RFC 4585 for details. struct NackConfig { @@ -59,21 +64,10 @@ struct FecConfig { int red_payload_type; }; -// Settings for RTP retransmission payload format, see RFC 4588 for details. -struct RtxConfig { - RtxConfig() : rtx_payload_type(0), video_payload_type(0) {} - // SSRCs to use for the RTX streams. - std::vector ssrcs; - - // Payload type to use for the RTX stream. - int rtx_payload_type; - - // Original video payload this RTX stream is used for. - int video_payload_type; -}; - // RTP header extension to use for the video stream, see RFC 5285. struct RtpExtension { + static const char* kTOffset; + static const char* kAbsSendTime; RtpExtension(const char* name, int id) : name(name), id(id) {} // TODO(mflodman) Add API to query supported extensions. std::string name; diff --git a/media/webrtc/trunk/webrtc/engine_configurations.h b/media/webrtc/trunk/webrtc/engine_configurations.h index bd5eaa7510d2..acdbe9a3c451 100644 --- a/media/webrtc/trunk/webrtc/engine_configurations.h +++ b/media/webrtc/trunk/webrtc/engine_configurations.h @@ -104,7 +104,6 @@ #define WEBRTC_VIDEO_ENGINE_CAPTURE_API #define WEBRTC_VIDEO_ENGINE_CODEC_API -#define WEBRTC_VIDEO_ENGINE_ENCRYPTION_API #define WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API #define WEBRTC_VIDEO_ENGINE_RENDER_API #define WEBRTC_VIDEO_ENGINE_RTP_RTCP_API diff --git a/media/webrtc/trunk/webrtc/experiments.h b/media/webrtc/trunk/webrtc/experiments.h new file mode 100644 index 000000000000..309c7b4f3bb6 --- /dev/null +++ b/media/webrtc/trunk/webrtc/experiments.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_EXPERIMENTS_H_ +#define WEBRTC_EXPERIMENTS_H_ + +namespace webrtc { +struct PaddingStrategy { + PaddingStrategy() + : redundant_payloads(false) {} + explicit PaddingStrategy(bool redundant_payloads) + : redundant_payloads(redundant_payloads) {} + virtual ~PaddingStrategy() {} + + const bool redundant_payloads; +}; + +struct RemoteBitrateEstimatorMinRate { + RemoteBitrateEstimatorMinRate() : min_rate(30000) {} + RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {} + + uint32_t min_rate; +}; + +struct SkipEncodingUnusedStreams { + SkipEncodingUnusedStreams() : enabled(false) {} + explicit SkipEncodingUnusedStreams(bool set_enabled) + : enabled(set_enabled) {} + virtual ~SkipEncodingUnusedStreams() {} + + const bool enabled; +}; +} // namespace webrtc +#endif // WEBRTC_EXPERIMENTS_H_ diff --git a/media/webrtc/trunk/webrtc/video_engine/new_include/frame_callback.h b/media/webrtc/trunk/webrtc/frame_callback.h similarity index 73% rename from media/webrtc/trunk/webrtc/video_engine/new_include/frame_callback.h rename to media/webrtc/trunk/webrtc/frame_callback.h index 7f542450ab64..1d73f4a0472d 100644 --- a/media/webrtc/trunk/webrtc/video_engine/new_include/frame_callback.h +++ b/media/webrtc/trunk/webrtc/frame_callback.h @@ -11,11 +11,24 @@ #ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_FRAME_CALLBACK_H_ #define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_FRAME_CALLBACK_H_ +#include + +#include "webrtc/common_types.h" + namespace webrtc { class I420VideoFrame; -struct EncodedFrame; +struct EncodedFrame { + public: + EncodedFrame() : data_(NULL), length_(0), frame_type_(kFrameEmpty) {} + EncodedFrame(const uint8_t* data, size_t length, FrameType frame_type) + : data_(data), length_(length), frame_type_(frame_type) {} + + const uint8_t* data_; + const size_t length_; + const FrameType frame_type_; +}; class I420FrameCallback { public: @@ -34,6 +47,7 @@ class EncodedFrameObserver { protected: virtual ~EncodedFrameObserver() {} }; + } // namespace webrtc #endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_FRAME_CALLBACK_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/cng/cng.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/cng/cng.gypi index 9bcde5918f4b..3ad7dd320132 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/cng/cng.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/cng/cng.gypi @@ -16,10 +16,12 @@ ], 'include_dirs': [ 'include', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'include', + '<(webrtc_root)', ], }, 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711.gypi index 856e0197409c..c39b4af1d1eb 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711.gypi @@ -13,10 +13,12 @@ 'type': 'static_library', 'include_dirs': [ 'include', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'include', + '<(webrtc_root)', ], }, 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711_interface.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711_interface.c index 9ef7884c58f2..087e3e11cbdc 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711_interface.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711_interface.c @@ -31,7 +31,7 @@ int16_t WebRtcG711_EncodeA(void* state, for (n = 0; n 
< len; n++) { tempVal = (uint16_t) linear_to_alaw(speechIn[n]); -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN if ((n & 0x1) == 1) { encoded[n >> 1] |= ((uint16_t) tempVal); } else { @@ -69,7 +69,7 @@ int16_t WebRtcG711_EncodeU(void* state, for (n = 0; n < len; n++) { tempVal = (uint16_t) linear_to_ulaw(speechIn[n]); -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN if ((n & 0x1) == 1) { encoded[n >> 1] |= ((uint16_t) tempVal); } else { @@ -103,7 +103,7 @@ int16_t WebRtcG711_DecodeA(void* state, } for (n = 0; n < len; n++) { -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN if ((n & 0x1) == 1) { tempVal = ((uint16_t) encoded[n >> 1] & 0xFF); } else { @@ -140,7 +140,7 @@ int16_t WebRtcG711_DecodeU(void* state, } for (n = 0; n < len; n++) { -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN if ((n & 0x1) == 1) { tempVal = ((uint16_t) encoded[n >> 1] & 0xFF); } else { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g722/g722.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g722/g722.gypi index ba0dcb4f7f23..5876f9feb63a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g722/g722.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g722/g722.gypi @@ -12,10 +12,12 @@ 'type': 'static_library', 'include_dirs': [ 'include', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'include', + '<(webrtc_root)', ], }, 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/decode.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/decode.c index 5da968543854..febd4ceb05f4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/decode.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/decode.c @@ -28,7 +28,7 @@ #include "decode_residual.h" #include "unpack_bits.h" #include "hp_output.h" -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN #include "swap_bytes.h" #endif @@ -54,7 +54,7 @@ void WebRtcIlbcfix_DecodeImpl( int16_t PLCresidual[BLOCKL_MAX + LPC_FILTERORDER]; int16_t syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)]; int16_t PLClpc[LPC_FILTERORDER + 1]; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN uint16_t swapped[NO_OF_WORDS_30MS]; #endif iLBC_bits *iLBCbits_inst = (iLBC_bits*)PLCresidual; @@ -68,7 +68,7 @@ void WebRtcIlbcfix_DecodeImpl( /* Unpacketize bits into parameters */ -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN WebRtcIlbcfix_SwapBytes(bytes, iLBCdec_inst->no_of_words, swapped); last_bit = WebRtcIlbcfix_UnpackBits(swapped, iLBCbits_inst, iLBCdec_inst->mode); #else diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/encode.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/encode.c index 75d1672b8a7a..2f899a53b99d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/encode.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/encode.c @@ -32,7 +32,7 @@ #include "unpack_bits.h" #include "index_conv_dec.h" #endif -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN #include "swap_bytes.h" #endif @@ -489,7 +489,7 @@ void WebRtcIlbcfix_EncodeImpl( WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode); #endif -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN /* Swap bytes for LITTLE ENDIAN since the packbits() function assumes BIG_ENDIAN machine */ #ifdef SPLIT_10MS diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi 
b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi index b07e3f1a470a..ec3284f82038 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi @@ -16,10 +16,12 @@ ], 'include_dirs': [ 'interface', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'interface', + '<(webrtc_root)', ], }, 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c index 945475f80711..8baa30738f66 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c @@ -327,7 +327,7 @@ int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst, { ISACFIX_SubStruct *ISAC_inst; int16_t stream_len; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif @@ -352,7 +352,7 @@ int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst, /* convert from bytes to int16_t */ -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0;k<(stream_len+1)>>1;k++) { encoded[k] = (int16_t)( ( (uint16_t)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 ) | (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8)); @@ -442,7 +442,7 @@ int16_t WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst, /* convert from bytes to int16_t */ -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0;k<(stream_len+1)>>1;k++) { encoded[k] = (int16_t)(((uint16_t)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8) | (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8)); @@ -485,7 +485,7 @@ int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst, { ISACFIX_SubStruct *ISAC_inst; int16_t stream_len; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif @@ -507,7 +507,7 @@ int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst, return -1; } -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0;k<(stream_len+1)>>1;k++) { encoded[k] = (int16_t)( ( (uint16_t)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 ) | (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8)); @@ -588,7 +588,7 @@ int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst, ISACFIX_SubStruct *ISAC_inst; Bitstr_dec streamdata; uint16_t partOfStream[5]; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; @@ -621,7 +621,7 @@ int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst, streamdata.stream_index = 0; streamdata.full = 1; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0; k<5; k++) { streamdata.stream[k] = (uint16_t) (((uint16_t)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8)); } @@ -676,7 +676,7 @@ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst, ISACFIX_SubStruct *ISAC_inst; Bitstr_dec streamdata; uint16_t partOfStream[5]; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; @@ -709,7 +709,7 @@ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst, streamdata.stream_index = 0; streamdata.full = 1; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0; k<5; k++) { streamdata.stream[k] = (uint16_t) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8)); } @@ -765,7 +765,7 @@ int16_t 
WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst, /* number of samples (480 or 960), output from decoder */ /* that were actually used in the encoder/decoder (determined on the fly) */ int16_t number_of_samples; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t declen = 0; @@ -793,7 +793,7 @@ int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst, (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded; /* convert bitstream from int16_t to bytes */ -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0; k<(len>>1); k++) { (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (uint16_t) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8)); } @@ -868,7 +868,7 @@ int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst, /* twice the number of samples (480 or 960), output from decoder */ /* that were actually used in the encoder/decoder (determined on the fly) */ int16_t number_of_samples; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t declen = 0; @@ -894,7 +894,7 @@ int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst, (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded; /* convert bitstream from int16_t to bytes */ -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0; k<(len>>1); k++) { (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (uint16_t) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8)); } @@ -1267,7 +1267,7 @@ int16_t WebRtcIsacfix_ReadFrameLen(const int16_t* encoded, { Bitstr_dec streamdata; uint16_t partOfStream[5]; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; @@ -1280,7 +1280,7 @@ int16_t WebRtcIsacfix_ReadFrameLen(const int16_t* encoded, streamdata.stream_index = 0; streamdata.full = 1; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0; k<5; k++) { streamdata.stream[k] = (uint16_t) (((uint16_t)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8)); } @@ -1316,7 +1316,7 @@ int16_t WebRtcIsacfix_ReadBwIndex(const int16_t* encoded, { Bitstr_dec streamdata; uint16_t partOfStream[5]; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; @@ -1329,7 +1329,7 @@ int16_t WebRtcIsacfix_ReadBwIndex(const int16_t* encoded, streamdata.stream_index = 0; streamdata.full = 1; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k=0; k<5; k++) { streamdata.stream[k] = (uint16_t) (((uint16_t)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8)); } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi index fd7bf2e9608b..87c98606a110 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi @@ -17,10 +17,12 @@ ], 'include_dirs': [ '../interface', + '<(webrtc_root)' ], 'direct_dependent_settings': { 'include_dirs': [ '../interface', + '<(webrtc_root)', ], }, 'sources': [ @@ -72,7 +74,7 @@ 'WEBRTC_LINUX', ], }], - ['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', { + ['(target_arch=="arm" and arm_version==7) or target_arch=="armv7"', { 'dependencies': [ 'isac_neon', ], 'sources': [ 'lattice_armv7.S', @@ -87,7 +89,7 @@ }, ], 'conditions': [ - ['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', { + ['(target_arch=="arm" and arm_version==7) or target_arch=="armv7"', { 'targets': [ { 'target_name': 'isac_neon', @@ -96,6 +98,9 
@@ 'dependencies': [ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', ], + 'include_dirs': [ + '<(webrtc_root)', + ], 'sources': [ 'entropy_coding_neon.c', 'filterbanks_neon.S', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c index a1dced98b9ad..9c4e58756d0b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c @@ -8,20 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ -/* - * pitch_estimator.c - * - * Pitch filter functions - * - */ +#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" #ifdef WEBRTC_ARCH_ARM_NEON #include #endif -#include "pitch_estimator.h" -#include "signal_processing_library.h" -#include "system_wrappers/interface/compile_assert.h" +#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" +#include "webrtc/system_wrappers/interface/compile_assert_c.h" /* log2[0.2, 0.5, 0.98] in Q8 */ static const int16_t kLogLagWinQ8[3] = { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c index 89d884a0b91b..c3db01c64e05 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c @@ -8,18 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ -/* - * pitch_filter.c - * - * Pitch filter functions - * - */ +#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "system_wrappers/interface/compile_assert.h" +#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" +#include "webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h" +#include "webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h" +#include "webrtc/system_wrappers/interface/compile_assert_c.h" // Number of segments in a pitch subframe. static const int kSegments = 5; @@ -27,11 +21,6 @@ static const int kSegments = 5; // A division factor of 1/5 in Q15. static const int16_t kDivFactor = 6553; -// Filter coefficicients in Q15. -static const int16_t kDampFilter[PITCH_DAMPORDER] = { - -2294, 8192, 20972, 8192, -2294 -}; - // Interpolation coefficients; generated by design_pitch_filter.m. // Coefficients are stored in Q14. 
static const int16_t kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi index 0130732af7a2..a43450e047c3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi @@ -19,6 +19,7 @@ './main/test', './main/interface', './main/util', + '<(webrtc_root)', ], 'sources': [ './main/test/simpleKenny.c', @@ -36,6 +37,7 @@ './main/test', './main/interface', './main/util', + '<(webrtc_root)', ], 'sources': [ './main/test/ReleaseTest-API/ReleaseTest-API.cc', @@ -54,6 +56,7 @@ './main/interface', '../../../../common_audio/signal_processing/include', './main/util', + '<(webrtc_root)', ], 'sources': [ './main/test/SwitchingSampRate/SwitchingSampRate.cc', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isacfix_test.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isacfix_test.gypi index 4bac161da18d..419d302fd981 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isacfix_test.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isacfix_test.gypi @@ -19,6 +19,7 @@ 'include_dirs': [ './fix/test', './fix/interface', + '<(webrtc_root)', ], 'sources': [ './fix/test/kenny.cc', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h index f937b3453fab..76a61e6d33c5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h @@ -14,7 +14,7 @@ /* * Define the fixed-point numeric formats */ -#include "typedefs.h" +#include "webrtc/typedefs.h" typedef struct WebRtcISACStruct ISACStruct; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c index 66bf06d472bb..9ae69a0bbf2d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c @@ -1449,13 +1449,18 @@ void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata) { /* quantize reflection coefficients (add noise feedback?) */ for (k = 0; k < AR_ORDER; k++) { index[k] = WebRtcIsac_kQArRcInitIndex[k]; - + // The safe-guards in following while conditions are to suppress gcc 4.8.3 + // warnings, Issue 2888. Otherwise, first and last elements of + // |WebRtcIsac_kQArBoundaryLevels| are such that the following search + // *never* cause an out-of-boundary read. 
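The comment above explains the guards added to the reflection-coefficient quantizer search. A self-contained sketch of the bounded walk it describes (the misspelled NUM_AR_RC_QUANT_BAUNDARY is the codebase's own identifier; FindQuantIndex and its parameters below are illustrative):

    #include <cstdint>

    // Bounded linear search over quantization boundary levels: the index
    // can neither run past the last boundary nor below the first, so the
    // table is never read out of bounds even for extreme inputs.
    int FindQuantIndex(int16_t value, const int16_t* boundaries,
                       int num_boundaries, int start_index) {
      int index = start_index;
      if (value > boundaries[index]) {
        while (index + 1 < num_boundaries && value > boundaries[index + 1])
          ++index;
      } else {
        while (index > 0 && value < boundaries[--index]) {
        }
      }
      return index;
    }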
if (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k]]) { - while (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) { + while (index[k] + 1 < NUM_AR_RC_QUANT_BAUNDARY && + RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) { index[k]++; } } else { - while (RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ; + while (index[k] > 0 && + RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ; } RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]); } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c index 1e9027200204..fa54a8d873c4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c @@ -15,20 +15,21 @@ * */ -#include "isac.h" -#include "bandwidth_estimator.h" -#include "crc.h" -#include "entropy_coding.h" -#include "codec.h" -#include "structs.h" -#include "signal_processing_library.h" -#include "lpc_shape_swb16_tables.h" -#include "os_specific_inline.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h" -#include -#include -#include #include +#include +#include +#include + +#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/codec.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/crc.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/structs.h" #define BIT_MASK_DEC_INIT 0x0001 #define BIT_MASK_ENC_INIT 0x0002 @@ -273,7 +274,7 @@ int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) { ISACMainStruct* instISAC; if (ISAC_main_inst != NULL) { - instISAC = (ISACMainStruct*)WEBRTC_SPL_VNEW(ISACMainStruct, 1); + instISAC = (ISACMainStruct*)malloc(sizeof(ISACMainStruct)); *ISAC_main_inst = (ISACStruct*)instISAC; if (*ISAC_main_inst != NULL) { instISAC->errorCode = 0; @@ -306,7 +307,7 @@ int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) { */ int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst) { ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - WEBRTC_SPL_FREE(instISAC); + free(instISAC); return 0; } @@ -552,8 +553,8 @@ int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, } if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - WebRtcSpl_AnalysisQMF(speech_in_ptr, speechInLB, speechInUB, - instISAC->analysisFBState1, + WebRtcSpl_AnalysisQMF(speech_in_ptr, SWBFRAMESAMPLES_10ms, speechInLB, + speechInUB, instISAC->analysisFBState1, instISAC->analysisFBState2); /* Convert from fixed to floating point. 
*/ @@ -742,7 +743,7 @@ int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, WebRtcIsac_GetCrc((int16_t*)(&(ptrEncodedUW8[streamLenLB + 1])), streamLenUB + garbageLen, &crc); -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { ptrEncodedUW8[streamLen - LEN_CHECK_SUM_WORD8 + k] = (uint8_t)((crc >> (24 - k * 8)) & 0xFF); @@ -805,7 +806,7 @@ int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst, int32_t currentBN; uint8_t* encodedPtrUW8 = (uint8_t*)encoded; uint32_t crc; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int16_t k; #endif ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; @@ -896,7 +897,7 @@ int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst, WebRtcIsac_GetCrc((int16_t*)(&(encodedPtrUW8[streamLenLB + 1])), streamLenUB, &crc); -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { encodedPtrUW8[totalStreamLen - LEN_CHECK_SUM_WORD8 + k] = (uint8_t)((crc >> (24 - k * 8)) & 0xFF); @@ -1008,7 +1009,7 @@ int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst, uint32_t arr_ts) { ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; Bitstr streamdata; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; @@ -1029,7 +1030,7 @@ int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst, WebRtcIsac_ResetBitstream(&(streamdata)); -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k = 0; k < 10; k++) { streamdata.stream[k] = (uint8_t)((encoded[k >> 1] >> ((k & 1) << 3)) & 0xFF); @@ -1314,7 +1315,7 @@ static int16_t Decode(ISACStruct* ISAC_main_inst, speechIdx = 0; while (speechIdx < numSamplesLB) { WebRtcSpl_SynthesisQMF(&outFrameLB[speechIdx], &outFrameUB[speechIdx], - &decoded[(speechIdx << 1)], + FRAMESAMPLES_10ms, &decoded[(speechIdx << 1)], instISAC->synthesisFBState1, instISAC->synthesisFBState2); @@ -1741,14 +1742,14 @@ int16_t WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst, int16_t WebRtcIsac_ReadBwIndex(const int16_t* encoded, int16_t* bweIndex) { Bitstr streamdata; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; WebRtcIsac_ResetBitstream(&(streamdata)); -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k = 0; k < 10; k++) { streamdata.stream[k] = (uint8_t)((encoded[k >> 1] >> ((k & 1) << 3)) & 0xFF); @@ -1790,7 +1791,7 @@ int16_t WebRtcIsac_ReadFrameLen(ISACStruct* ISAC_main_inst, const int16_t* encoded, int16_t* frameLength) { Bitstr streamdata; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif int16_t err; @@ -1798,7 +1799,7 @@ int16_t WebRtcIsac_ReadFrameLen(ISACStruct* ISAC_main_inst, WebRtcIsac_ResetBitstream(&(streamdata)); -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k = 0; k < 10; k++) { streamdata.stream[k] = (uint8_t)((encoded[k >> 1] >> ((k & 1) << 3)) & 0xFF); @@ -2108,7 +2109,7 @@ int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst, int16_t totalLenUB; uint8_t* ptrEncodedUW8 = (uint8_t*)encoded; ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN int k; #endif @@ -2164,7 +2165,7 @@ int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst, WebRtcIsac_GetCrc((int16_t*)(&(ptrEncodedUW8[streamLenLB + 1])), streamLenUB, &crc); -#ifndef WEBRTC_BIG_ENDIAN +#ifndef WEBRTC_ARCH_BIG_ENDIAN for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { ptrEncodedUW8[streamLen - LEN_CHECK_SUM_WORD8 + k] = (uint8_t)((crc >> (24 
- k * 8)) & 0xFF); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.gypi index 3d4859832af5..5ed6d448bbcd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/isac.gypi @@ -16,10 +16,12 @@ ], 'include_dirs': [ '../interface', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ '../interface', + '<(webrtc_root)', ], }, 'sources': [ @@ -73,18 +75,13 @@ 'structs.h', 'os_specific_inline.h', ], - 'conditions': [ - ['OS!="win"', { - 'defines': [ - 'WEBRTC_LINUX', - ], - 'link_settings': { - 'libraries': [ - '-lm', - ], - }, - }], - ], + 'conditions': [ + ['OS=="linux"', { + 'link_settings': { + 'libraries': ['-lm',], + }, + }], + ], }, ], } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h index c469c2ec9cc4..891c67ccda68 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h @@ -15,8 +15,7 @@ #include #include "typedefs.h" -// TODO(turaj): switch to WEBRTC_POSIX when available -#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#if defined(WEBRTC_POSIX) #define WebRtcIsac_lrint lrint #elif (defined(WEBRTC_ARCH_X86) && defined(WIN32)) static __inline long int WebRtcIsac_lrint(double x_dbl) { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c index 9eae0555f65a..0f6d889225dd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c @@ -13,68 +13,69 @@ /********************* AR Coefficient Tables ************************/ /* cdf for quantized reflection coefficient 1 */ -const uint16_t WebRtcIsac_kQArRc1Cdf[12] = { +const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { 0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531, 65533, 65535}; /* cdf for quantized reflection coefficient 2 */ -const uint16_t WebRtcIsac_kQArRc2Cdf[12] = { +const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { 0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531, 65533, 65535}; /* cdf for quantized reflection coefficient 3 */ -const uint16_t WebRtcIsac_kQArRc3Cdf[12] = { +const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { 0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531, 65533, 65535}; /* cdf for quantized reflection coefficient 4 */ -const uint16_t WebRtcIsac_kQArRc4Cdf[12] = { +const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { 0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531, 65533, 65535}; /* cdf for quantized reflection coefficient 5 */ -const uint16_t WebRtcIsac_kQArRc5Cdf[12] = { +const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { 0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531, 65533, 65535}; /* cdf for quantized reflection coefficient 6 */ -const uint16_t WebRtcIsac_kQArRc6Cdf[12] = { +const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { 0, 2, 4, 6, 14, 6579, 57360, 
65409, 65529, 65531, 65533, 65535}; /* representation levels for quantized reflection coefficient 1 */ -const int16_t WebRtcIsac_kQArRc1Levels[11] = { +const int16_t WebRtcIsac_kQArRc1Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104 }; /* representation levels for quantized reflection coefficient 2 */ -const int16_t WebRtcIsac_kQArRc2Levels[11] = { +const int16_t WebRtcIsac_kQArRc2Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104 }; /* representation levels for quantized reflection coefficient 3 */ -const int16_t WebRtcIsac_kQArRc3Levels[11] = { +const int16_t WebRtcIsac_kQArRc3Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { -32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104 }; /* representation levels for quantized reflection coefficient 4 */ -const int16_t WebRtcIsac_kQArRc4Levels[11] = { +const int16_t WebRtcIsac_kQArRc4Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { -32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104 }; /* representation levels for quantized reflection coefficient 5 */ -const int16_t WebRtcIsac_kQArRc5Levels[11] = { +const int16_t WebRtcIsac_kQArRc5Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { -32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104 }; /* representation levels for quantized reflection coefficient 6 */ -const int16_t WebRtcIsac_kQArRc6Levels[11] = { +const int16_t WebRtcIsac_kQArRc6Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { -32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104 }; /* quantization boundary levels for reflection coefficients */ -const int16_t WebRtcIsac_kQArBoundaryLevels[12] = { --32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, 32767 +const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY] = { +-32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, +32767 }; /* initial index for AR reflection coefficient quantizer and cdf table search */ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h index 22fe6a2102ca..989cb367bff9 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h @@ -21,27 +21,29 @@ #include "structs.h" +#define NUM_AR_RC_QUANT_BAUNDARY 12 + /********************* AR Coefficient Tables ************************/ /* cdf for quantized reflection coefficient 1 */ -extern const uint16_t WebRtcIsac_kQArRc1Cdf[12]; +extern const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY]; /* cdf for quantized reflection coefficient 2 */ -extern const uint16_t WebRtcIsac_kQArRc2Cdf[12]; +extern const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY]; /* cdf for quantized reflection coefficient 3 */ -extern const uint16_t WebRtcIsac_kQArRc3Cdf[12]; +extern const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY]; /* cdf for quantized reflection coefficient 4 */ -extern const uint16_t WebRtcIsac_kQArRc4Cdf[12]; +extern const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY]; /* cdf for quantized reflection coefficient 5 */ -extern const uint16_t WebRtcIsac_kQArRc5Cdf[12]; +extern const uint16_t 
WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY]; /* cdf for quantized reflection coefficient 6 */ -extern const uint16_t WebRtcIsac_kQArRc6Cdf[12]; +extern const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY]; /* quantization boundary levels for reflection coefficients */ -extern const int16_t WebRtcIsac_kQArBoundaryLevels[12]; +extern const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY]; /* initial indices for AR reflection coefficient quantizer and cdf table search */ extern const uint16_t WebRtcIsac_kQArRcInitIndex[AR_ORDER]; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h index 1bd73e75bd09..62c890c84bc7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h @@ -18,10 +18,9 @@ #ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_ #define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_ - -#include "typedefs.h" -#include "settings.h" -#include "isac.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h" +#include "webrtc/modules/audio_coding/codecs/isac/main/source/settings.h" +#include "webrtc/typedefs.h" typedef struct Bitstreamstruct { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/opus/opus.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/opus/opus.gypi index 00e77e7d31d0..5af3dce0d8fd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/opus/opus.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/opus/opus.gypi @@ -23,6 +23,9 @@ ], }], ], + 'include_dirs': [ + '<(webrtc_root)', + ], 'sources': [ 'interface/opus_interface.h', 'opus_interface.c', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c index e10a87191f44..c2658cbed868 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c @@ -25,7 +25,7 @@ int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b, int16_t len, int16_t *speechOut16b) { -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN memcpy(speechOut16b, speechIn16b, len * sizeof(int16_t)); #else int i; @@ -64,7 +64,7 @@ int16_t WebRtcPcm16b_DecodeW16(void *inst, int16_t *speechOut16b, int16_t* speechType) { -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN memcpy(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1)); #else int i; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi index 40bc2aabf658..bbb613e3ce0f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi @@ -13,10 +13,12 @@ 'type': 'static_library', 'include_dirs': [ 'include', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'include', + '<(webrtc_root)', ], }, 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.cc index ab4003abbfe4..64e323c0ebd8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.cc +++ 
b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.cc @@ -43,6 +43,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_AMR ACMAMR::ACMAMR(int16_t /* codec_id */) : encoder_inst_ptr_(NULL), @@ -307,4 +309,6 @@ ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const { } #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.h index 4471e6bca72f..efa568740789 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amr.h @@ -21,6 +21,8 @@ namespace webrtc { enum ACMAMRPackingFormat; +namespace acm2 { + class ACMAMR : public ACMGenericCodec { public: explicit ACMAMR(int16_t codec_id); @@ -60,6 +62,8 @@ class ACMAMR : public ACMGenericCodec { ACMAMRPackingFormat encoder_packing_format_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMR_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.cc index 849353a93372..4dd4e97b2c30 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.cc @@ -40,6 +40,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_AMRWB ACMAMRwb::ACMAMRwb(int16_t /* codec_id */) : encoder_inst_ptr_(NULL), @@ -313,4 +315,6 @@ ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.h index e5bd99d9bbe5..c8db659336e2 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_amrwb.h @@ -19,6 +19,8 @@ struct AMRWB_decinst_t_; namespace webrtc { +namespace acm2 { + class ACMAMRwb : public ACMGenericCodec { public: explicit ACMAMRwb(int16_t codec_id); @@ -61,6 +63,8 @@ class ACMAMRwb : public ACMGenericCodec { ACMAMRPackingFormat encoder_packing_format_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMRWB_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.cc index 21fa3a9d0d8f..3cd6e84d46af 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.cc @@ -20,6 +20,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_CELT ACMCELT::ACMCELT(int16_t /* codec_id */) @@ -188,4 +190,6 @@ int16_t ACMCELT::SetBitRateSafe(const int32_t rate) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.h index 4b40f799e932..dc8d3ee0fa2e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_celt.h @@ -19,6 +19,8 @@ struct CELT_decinst_t_; namespace webrtc { +namespace acm2 { + class ACMCELT : public ACMGenericCodec { public: explicit ACMCELT(int16_t codec_id); @@ -45,6 +47,8 @@ class 
ACMCELT : public ACMGenericCodec { uint16_t channels_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CELT_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.cc index 9e658bdad187..7f0adf819e52 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.cc @@ -17,6 +17,8 @@ namespace webrtc { +namespace acm2 { + ACMCNG::ACMCNG(int16_t codec_id) { encoder_inst_ptr_ = NULL; codec_id_ = codec_id; @@ -76,4 +78,6 @@ void ACMCNG::InternalDestructEncoderInst(void* ptr_inst) { return; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.h index 3816fa2a89b1..a0c1c5544eb0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_cng.h @@ -19,6 +19,8 @@ struct WebRtcCngDecInst; namespace webrtc { +namespace acm2 { + class ACMCNG: public ACMGenericCodec { public: explicit ACMCNG(int16_t codec_id); @@ -51,6 +53,8 @@ class ACMCNG: public ACMGenericCodec { uint16_t samp_freq_hz_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CNG_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc index f99c85b494c6..fd30a137ae0e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc @@ -102,6 +102,8 @@ namespace webrtc { +namespace acm2 { + // Not yet used payload-types. // 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, // 67, 66, 65 @@ -270,7 +272,7 @@ const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = { // Opus supports frames shorter than 10ms, // but it doesn't help us to use them. // Mono and stereo. - {1, {960}, 0, 2, false}, + {4, {480, 960, 1920, 2880}, 0, 2, false}, #endif #ifdef WEBRTC_CODEC_SPEEX {3, {160, 320, 480}, 0, 1, false}, @@ -954,4 +956,6 @@ bool ACMCodecDB::OwnsDecoder(int codec_id) { return ACMCodecDB::codec_settings_[codec_id].owns_decoder; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h index b992b7d27052..98869efeead8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h @@ -22,6 +22,8 @@ namespace webrtc { +namespace acm2 { + // TODO(tlegrand): replace class ACMCodecDB with a namespace. 
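The namespace edits repeated across these acm2 files follow one mechanical pattern, sketched here with illustrative declarations: module-private classes move into a nested webrtc::acm2 namespace, while types shared with other modules (CodecInst, WebRtcACMCodecParams) stay at webrtc scope.

    namespace webrtc {

    struct CodecInst;        // shared with other modules: stays in webrtc

    namespace acm2 {

    class ACMGenericCodec;   // ACM-internal classes move into acm2
    class AcmReceiver;

    }  // namespace acm2

    }  // namespace webrtc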
class ACMCodecDB { public: @@ -350,6 +352,8 @@ class ACMCodecDB { static const NetEqDecoder neteq_decoders_[kMaxNumCodecs]; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.cc index ca7e86fd84a8..07bab6520b81 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.cc @@ -18,6 +18,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_AVT ACMDTMFPlayout::ACMDTMFPlayout(int16_t /* codec_id */) { return; } @@ -83,4 +85,6 @@ void ACMDTMFPlayout::DestructEncoderSafe() { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.h index 4c3154ca9cd3..5adb0ddc1e9f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + class ACMDTMFPlayout : public ACMGenericCodec { public: explicit ACMDTMFPlayout(int16_t codec_id); @@ -35,6 +37,8 @@ class ACMDTMFPlayout : public ACMGenericCodec { void InternalDestructEncoderInst(void* ptr_inst); }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_DTMF_PLAYOUT_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.cc index fe2bd6cb9f84..8d457723bcd8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.cc @@ -19,6 +19,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_G722 ACMG722::ACMG722(int16_t /* codec_id */) @@ -194,4 +196,6 @@ void ACMG722::InternalDestructEncoderInst(void* ptr_inst) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.h index 34b6c8516c61..7216a574af8b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g722.h @@ -18,6 +18,8 @@ typedef struct WebRtcG722DecInst G722DecInst; namespace webrtc { +namespace acm2 { + // Forward declaration. 
struct ACMG722EncStr; struct ACMG722DecStr; @@ -52,6 +54,8 @@ class ACMG722 : public ACMGenericCodec { G722EncInst* encoder_inst_ptr_right_; // Prepared for stereo }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G722_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.cc index 0cba7108488d..3609f072b650 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.cc @@ -80,6 +80,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_G722_1 ACMG722_1::ACMG722_1(int16_t /* codec_id */) @@ -323,4 +325,6 @@ void ACMG722_1::InternalDestructEncoderInst(void* ptr_inst) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.h index 4a0bd480d969..74d9b27099d3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221.h @@ -24,6 +24,8 @@ struct G722_1_Inst_t_; namespace webrtc { +namespace acm2 { + class ACMG722_1 : public ACMGenericCodec { public: explicit ACMG722_1(int16_t codec_id); @@ -57,6 +59,8 @@ class ACMG722_1 : public ACMGenericCodec { G722_1_32_encinst_t_* encoder_inst32_ptr_right_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G7221_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.cc index 531008af2741..f6555f5db350 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.cc @@ -80,6 +80,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_G722_1C ACMG722_1C::ACMG722_1C(int16_t /* codec_id */) @@ -329,4 +331,6 @@ void ACMG722_1C::InternalDestructEncoderInst(void* ptr_inst) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.h index 961ed4e17a2b..66b0f54a37ee 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7221c.h @@ -24,6 +24,8 @@ struct G722_1_Inst_t_; namespace webrtc { +namespace acm2 { + class ACMG722_1C : public ACMGenericCodec { public: explicit ACMG722_1C(int16_t codec_id); @@ -57,6 +59,8 @@ class ACMG722_1C : public ACMGenericCodec { G722_1C_48_encinst_t_* encoder_inst48_ptr_right_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G7221C_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.cc index 91dbb43ee19f..6f95f36850d9 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.cc @@ -22,6 +22,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_G729 ACMG729::ACMG729(int16_t /* codec_id */) : encoder_inst_ptr_(NULL) {} @@ -252,4 +254,6 @@ void ACMG729::InternalDestructEncoderInst(void* ptr_inst) { 
#endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.h index f7e762cbafdf..88b1bf7c49a5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g729.h @@ -19,6 +19,8 @@ struct G729_decinst_t_; namespace webrtc { +namespace acm2 { + class ACMG729 : public ACMGenericCodec { public: explicit ACMG729(int16_t codec_id); @@ -49,6 +51,8 @@ class ACMG729 : public ACMGenericCodec { G729_encinst_t_* encoder_inst_ptr_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G729_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.cc index f16eec89b6ac..3cf9f22fce2d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.cc @@ -21,6 +21,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_G729_1 ACMG729_1::ACMG729_1(int16_t /* codec_id */) @@ -237,4 +239,6 @@ int16_t ACMG729_1::SetBitRateSafe(const int32_t rate) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.h index 5a38e59a3413..38c8fe176cbc 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_g7291.h @@ -19,6 +19,8 @@ struct G729_1_inst_t_; namespace webrtc { +namespace acm2 { + class ACMG729_1 : public ACMGenericCodec { public: explicit ACMG729_1(int16_t codec_id); @@ -47,6 +49,8 @@ class ACMG729_1 : public ACMGenericCodec { int16_t flag_g729_mode_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G7291_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc index 437feb7595bd..aa8e8be0637d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc @@ -21,6 +21,8 @@ namespace webrtc { +namespace acm2 { + // Enum for CNG enum { kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER, @@ -123,7 +125,10 @@ int32_t ACMGenericCodec::Add10MsDataSafe(const uint32_t timestamp, if ((in_audio_ix_write_ >= length_smpl * audio_channel) && (in_timestamp_ix_write_ > 0)) { in_audio_ix_write_ -= length_smpl * audio_channel; + assert(in_timestamp_ix_write_ >= 0); + in_timestamp_ix_write_--; + assert(in_audio_ix_write_ >= 0); WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_, "Adding 10ms with previous timestamp, overwriting the " "previous 10ms"); @@ -160,8 +165,11 @@ int32_t ACMGenericCodec::Add10MsDataSafe(const uint32_t timestamp, memmove(in_timestamp_, in_timestamp_ + missed_10ms_blocks, (in_timestamp_ix_write_ - missed_10ms_blocks) * sizeof(uint32_t)); in_timestamp_ix_write_ -= missed_10ms_blocks; + assert(in_timestamp_ix_write_ >= 0); + in_timestamp_[in_timestamp_ix_write_] = timestamp; in_timestamp_ix_write_++; + assert(in_timestamp_ix_write_ < TIMESTAMP_BUFFER_SIZE_W32); // Buffer is full. 
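The asserts added to Add10MsDataSafe() in the hunk below pin down one invariant after every index adjustment: the timestamp write index stays inside [0, TIMESTAMP_BUFFER_SIZE_W32). A toy version of the guarded append (kTimestampBufferSize and AppendTimestamp are stand-in names):

    #include <cassert>
    #include <cstdint>

    const int kTimestampBufferSize = 100;  // stands in for TIMESTAMP_BUFFER_SIZE_W32

    void AppendTimestamp(uint32_t* buf, int& write_ix, uint32_t ts) {
      buf[write_ix] = ts;
      ++write_ix;
      // Same post-increment range check the patch adds before returning.
      assert(write_ix < kTimestampBufferSize);
    }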
in_audio_ix_write_ = AUDIO_BUFFER_SIZE_W16; @@ -173,12 +181,11 @@ int32_t ACMGenericCodec::Add10MsDataSafe(const uint32_t timestamp, memcpy(in_audio_ + in_audio_ix_write_, data, length_smpl * audio_channel * sizeof(int16_t)); in_audio_ix_write_ += length_smpl * audio_channel; - assert(in_timestamp_ix_write_ < TIMESTAMP_BUFFER_SIZE_W32); - assert(in_timestamp_ix_write_ >= 0); in_timestamp_[in_timestamp_ix_write_] = timestamp; in_timestamp_ix_write_++; + assert(in_timestamp_ix_write_ < TIMESTAMP_BUFFER_SIZE_W32); return 0; } @@ -313,11 +320,7 @@ int16_t ACMGenericCodec::Encode(uint8_t* bitstream, // break from the loop break; } - - // TODO(andrew): This should be multiplied by the number of - // channels, right? - // http://code.google.com/p/webrtc/issues/detail?id=714 - done = in_audio_ix_read_ >= frame_len_smpl_; + done = in_audio_ix_read_ >= frame_len_smpl_ * num_channels_; } } if (status >= 0) { @@ -345,6 +348,7 @@ int16_t ACMGenericCodec::Encode(uint8_t* bitstream, (in_timestamp_ix_write_ - num_10ms_blocks) * sizeof(int32_t)); } in_timestamp_ix_write_ -= num_10ms_blocks; + assert(in_timestamp_ix_write_ >= 0); // Remove encoded audio and move next audio to be encoded to the beginning // of the buffer. Accordingly, adjust the read and write indices. @@ -448,11 +452,8 @@ int16_t ACMGenericCodec::InitEncoderSafe(WebRtcACMCodecParams* codec_params, int mirrorID; int codec_number = ACMCodecDB::CodecNumber(codec_params->codec_inst, &mirrorID); - if (codec_number < 0) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_, - "InitEncoderSafe: error, codec number negative"); - return -1; - } + assert(codec_number >= 0); + // Check if the parameters are for this codec. if ((codec_id_ >= 0) && (codec_id_ != codec_number) && (codec_id_ != mirrorID)) { @@ -481,7 +482,7 @@ int16_t ACMGenericCodec::InitEncoderSafe(WebRtcACMCodecParams* codec_params, encoder_exist_ = true; } } - frame_len_smpl_ = (codec_params->codec_inst).pacsize; + frame_len_smpl_ = codec_params->codec_inst.pacsize; num_channels_ = codec_params->codec_inst.channels; status = InternalInitEncoder(codec_params); if (status < 0) { @@ -490,24 +491,25 @@ int16_t ACMGenericCodec::InitEncoderSafe(WebRtcACMCodecParams* codec_params, encoder_initialized_ = false; return -1; } else { + // TODO(turajs): Move these allocations to the constructor issue 2445. // Store encoder parameters. memcpy(&encoder_params_, codec_params, sizeof(WebRtcACMCodecParams)); encoder_initialized_ = true; if (in_audio_ == NULL) { in_audio_ = new int16_t[AUDIO_BUFFER_SIZE_W16]; - if (in_audio_ == NULL) { - return -1; - } - memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t)); } if (in_timestamp_ == NULL) { in_timestamp_ = new uint32_t[TIMESTAMP_BUFFER_SIZE_W32]; - if (in_timestamp_ == NULL) { - return -1; - } - memset(in_timestamp_, 0, sizeof(uint32_t) * TIMESTAMP_BUFFER_SIZE_W32); } } + + // Fresh start of audio buffer. 
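The one-line Encode() change above resolves the TODO it replaces (webrtc issue 714): the input buffer holds interleaved samples, so a full frame of multichannel audio spans frame_len_smpl_ * num_channels_ entries, not frame_len_smpl_. An illustrative check, not the patch's code:

    // For interleaved stereo at 480 samples per channel, a complete frame
    // occupies 960 int16_t slots, so the read index must reach
    // frame_len_smpl * num_channels before the frame is done.
    bool FrameReady(int samples_consumed, int frame_len_smpl, int num_channels) {
      return samples_consumed >= frame_len_smpl * num_channels;
    }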
+ memset(in_audio_, 0, sizeof(*in_audio_) * AUDIO_BUFFER_SIZE_W16); + memset(in_timestamp_, 0, sizeof(*in_timestamp_) * TIMESTAMP_BUFFER_SIZE_W32); + in_audio_ix_write_ = 0; + in_audio_ix_read_ = 0; + in_timestamp_ix_write_ = 0; + return SetVADSafe(&codec_params->enable_dtx, &codec_params->enable_vad, &codec_params->vad_mode); } @@ -1002,4 +1004,6 @@ int16_t ACMGenericCodec::REDPayloadISAC(const int32_t /* isac_rate */, return -1; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h index 78feb99f0c46..d41580fff54d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h @@ -26,8 +26,12 @@ struct WebRtcCngEncInst; namespace webrtc { -// forward declaration +struct WebRtcACMCodecParams; struct CodecInst; + +namespace acm2 { + +// forward declaration class AcmReceiver; class ACMGenericCodec { @@ -909,6 +913,8 @@ class ACMGenericCodec { uint32_t unique_id_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_GENERIC_CODEC_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.cc index 44e6e3d91776..90f9fce7d6c4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.cc @@ -21,6 +21,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_GSMFR ACMGSMFR::ACMGSMFR(int16_t /* codec_id */) : encoder_inst_ptr_(NULL) {} @@ -154,4 +156,6 @@ void ACMGSMFR::InternalDestructEncoderInst(void* ptr_inst) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.h index 51c29eea4158..79c3a180b2c1 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_gsmfr.h @@ -19,6 +19,8 @@ struct GSMFR_decinst_t_; namespace webrtc { +namespace acm2 { + class ACMGSMFR : public ACMGenericCodec { public: explicit ACMGSMFR(int16_t codec_id); @@ -45,6 +47,8 @@ class ACMGSMFR : public ACMGenericCodec { GSMFR_encinst_t_* encoder_inst_ptr_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_GSMFR_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.cc index 14fbbd45068e..eaa079c46ce9 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.cc @@ -17,6 +17,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_ILBC ACMILBC::ACMILBC(int16_t /* codec_id */) : encoder_inst_ptr_(NULL) {} @@ -138,4 +140,6 @@ int16_t ACMILBC::SetBitRateSafe(const int32_t rate) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.h index e02c789d3f0e..fd6e85379e72 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.h +++ 
b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_ilbc.h @@ -19,6 +19,8 @@ struct iLBC_decinst_t_; namespace webrtc { +namespace acm2 { + class ACMILBC : public ACMGenericCodec { public: explicit ACMILBC(int16_t codec_id); @@ -43,6 +45,8 @@ class ACMILBC : public ACMGenericCodec { iLBC_encinst_t_* encoder_inst_ptr_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_ILBC_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.cc index 7957fd3dccac..e27284212fd8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.cc @@ -31,6 +31,8 @@ namespace webrtc { +namespace acm2 { + // we need this otherwise we cannot use forward declaration // in the header file #if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) @@ -826,4 +828,6 @@ AudioDecoder* ACMISAC::Decoder(int codec_id) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.h index 2e6657fb4098..a3227d5d0b39 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + struct ACMISACInst; class AcmAudioDecoderIsac; @@ -93,6 +95,8 @@ class ACMISAC : public ACMGenericCodec { bool decoder_initialized_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_ISAC_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac_macros.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac_macros.h index c2a782095a18..df0309769458 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac_macros.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_isac_macros.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + #ifdef WEBRTC_CODEC_ISAC #define ACM_ISAC_CREATE WebRtcIsac_Create #define ACM_ISAC_FREE WebRtcIsac_Free @@ -70,6 +72,8 @@ namespace webrtc { // decoder #endif +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_ISAC_MACROS_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc index 3f5a200bfcf9..607b933deb6b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc @@ -10,4 +10,6 @@ // This file contains unit tests for ACM's NetEQ wrapper (class ACMNetEQ). 
-namespace webrtc {} // namespace +namespace webrtc { + +namespace acm2 {} // namespace diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.cc index d627fad8d099..c00a9203a9d2 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.cc @@ -19,6 +19,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_OPUS ACMOpus::ACMOpus(int16_t /* codec_id */) @@ -184,4 +186,6 @@ int16_t ACMOpus::SetBitRateSafe(const int32_t rate) { #endif // WEBRTC_CODEC_OPUS +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.h index caac01093a39..a346e3c8ff3d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_opus.h @@ -19,6 +19,8 @@ struct WebRtcOpusDecInst; namespace webrtc { +namespace acm2 { + class ACMOpus : public ACMGenericCodec { public: explicit ACMOpus(int16_t codec_id); @@ -45,6 +47,8 @@ class ACMOpus : public ACMGenericCodec { int channels_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_OPUS_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.cc index 7c5b0bd329cc..89688a87499b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.cc @@ -19,6 +19,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_PCM16 ACMPCM16B::ACMPCM16B(int16_t /* codec_id */) { return; } @@ -89,4 +91,6 @@ void ACMPCM16B::DestructEncoderSafe() { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.h index 32490209a207..23b8c121e213 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcm16b.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + class ACMPCM16B : public ACMGenericCodec { public: explicit ACMPCM16B(int16_t codec_id); @@ -37,6 +39,8 @@ class ACMPCM16B : public ACMGenericCodec { int32_t sampling_freq_hz_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_PCM16B_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.cc index cb5ebccfdc9f..1dd6e48fb16d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.cc @@ -18,6 +18,8 @@ namespace webrtc { +namespace acm2 { + ACMPCMA::ACMPCMA(int16_t codec_id) { codec_id_ = codec_id; } ACMPCMA::~ACMPCMA() { return; } @@ -55,4 +57,6 @@ void ACMPCMA::DestructEncoderSafe() { return; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.h index 4102e17d97dc..2da873cb7531 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.h 
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcma.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + class ACMPCMA : public ACMGenericCodec { public: explicit ACMPCMA(int16_t codec_id); @@ -35,6 +37,8 @@ class ACMPCMA : public ACMGenericCodec { void InternalDestructEncoderInst(void* ptr_inst); }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_PCMA_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.cc index 6f479ed21975..7d2536fc0785 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.cc @@ -18,6 +18,8 @@ namespace webrtc { +namespace acm2 { + ACMPCMU::ACMPCMU(int16_t codec_id) { codec_id_ = codec_id; } ACMPCMU::~ACMPCMU() {} @@ -56,4 +58,6 @@ void ACMPCMU::DestructEncoderSafe() { encoder_initialized_ = false; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.h index 2898df6370e8..18d8279dcb3c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_pcmu.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + class ACMPCMU : public ACMGenericCodec { public: explicit ACMPCMU(int16_t codec_id); @@ -35,6 +37,8 @@ class ACMPCMU : public ACMGenericCodec { void InternalDestructEncoderInst(void* ptr_inst); }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_PCMU_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc index 949a70558d45..ac92198f92b1 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc @@ -19,6 +19,7 @@ #include "webrtc/common_types.h" #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" #include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h" +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" #include "webrtc/modules/audio_coding/main/acm2/nack.h" #include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h" #include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" @@ -30,9 +31,10 @@ namespace webrtc { +namespace acm2 { + namespace { -const int kRtpHeaderSize = 12; const int kNeteqInitSampleRateHz = 16000; const int kNackThresholdPackets = 2; @@ -121,7 +123,7 @@ AcmReceiver::AcmReceiver() last_audio_decoder_(-1), // Invalid value. decode_lock_(RWLockWrapper::CreateRWLock()), neteq_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()), - vad_enabled_(false), + vad_enabled_(true), previous_audio_activity_(AudioFrame::kVadUnknown), current_sample_rate_hz_(kNeteqInitSampleRateHz), nack_(), @@ -134,8 +136,9 @@ AcmReceiver::AcmReceiver() decoders_[n].registered = false; } - // Make sure we are on the same page as NetEq, although the default behavior - // for NetEq has been VAD disabled. + // Make sure we are on the same page as NetEq. Post-decode VAD is disabled by + // default in NetEq4, however, Audio Conference Mixer relies on VAD decision + // and fails if VAD decision is not provided. 
if (vad_enabled_) neteq_->EnableVad(); else @@ -459,6 +462,7 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) { audio_frame->vad_activity_ = previous_audio_activity_; SetAudioFrameActivityAndType(vad_enabled_, type, audio_frame); previous_audio_activity_ = audio_frame->vad_activity_; + call_stats_.DecodedByNetEq(audio_frame->speech_type_); return 0; } @@ -545,15 +549,17 @@ int AcmReceiver::RemoveAllCodecs() { } } } + // No codec is registered, invalidate last audio decoder. + last_audio_decoder_ = -1; return ret_val; } int AcmReceiver::RemoveCodec(uint8_t payload_type) { int codec_index = PayloadType2CodecIndex(payload_type); if (codec_index < 0) { // Such a payload-type is not registered. - LOG(LS_ERROR) << "payload_type " << payload_type << " is not registered" - " to be removed."; - return -1; + LOG(LS_WARNING) << "payload_type " << payload_type << " is not registered," + " no action is taken."; + return 0; } if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) { LOG_FERR1(LS_ERROR, "AcmReceiver::RemoveCodec", payload_type); @@ -561,6 +567,8 @@ int AcmReceiver::RemoveCodec(uint8_t payload_type) { } CriticalSectionScoped lock(neteq_crit_sect_); decoders_[codec_index].registered = false; + if (last_audio_decoder_ == codec_index) + last_audio_decoder_ = -1; // Codec is removed, invalidate last decoder. return 0; } @@ -755,6 +763,9 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) { return false; } + // Update statistics. + call_stats_.DecodedBySilenceGenerator(); + // Set the values if already got a packet, otherwise set to default values. if (last_audio_decoder_ >= 0) { current_sample_rate_hz_ = ACMCodecDB::database_[last_audio_decoder_].plfreq; @@ -826,4 +837,12 @@ void AcmReceiver::InsertStreamOfSyncPackets( } } +void AcmReceiver::GetDecodingCallStatistics( + AudioDecodingCallStats* stats) const { + CriticalSectionScoped lock(neteq_crit_sect_); + *stats = call_stats_.GetDecodingStatistics(); +} + +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.h index 5f6d684b0aa3..81eb5206b8a9 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver.h @@ -18,6 +18,7 @@ #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" #include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h" #include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h" +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" #include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h" #include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" #include "webrtc/modules/interface/module_common_types.h" @@ -26,11 +27,14 @@ namespace webrtc { +struct CodecInst; class CriticalSectionWrapper; class RWLockWrapper; class NetEq; + +namespace acm2 { + class Nack; -struct CodecInst; class AcmReceiver { public: @@ -317,6 +321,10 @@ class AcmReceiver { // NetEqBackgroundNoiseMode BackgroundNoiseModeForTest() const; + // + // Get statistics of calls to GetAudio(). + void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const; + private: int PayloadType2CodecIndex(uint8_t payload_type) const; @@ -358,8 +366,12 @@ class AcmReceiver { // initial delay is set. 
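Two behavioral changes to AcmReceiver land above: RemoveCodec() now treats an unregistered payload type as a successful no-op (matching ACM1 legacy behavior), and last_audio_decoder_ is invalidated whenever its codec is removed or all codecs are cleared. A simplified model of that contract (the map-based Receiver below is illustrative, not the real class):

    #include <map>

    struct Receiver {
      std::map<int, int> payload_to_codec;  // payload type -> codec index
      int last_audio_decoder = -1;

      int RemoveCodec(int payload_type) {
        auto it = payload_to_codec.find(payload_type);
        if (it == payload_to_codec.end())
          return 0;  // unknown payload type: warn and succeed (ACM1 legacy)
        if (last_audio_decoder == it->second)
          last_audio_decoder = -1;  // invalidate the stale cache
        payload_to_codec.erase(it);
        return 0;
      }
    };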
scoped_ptr missing_packets_sync_stream_; scoped_ptr late_packets_sync_stream_; + + CallStatistics call_stats_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVER_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc index e03029b068c2..712eeb268772 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc @@ -14,13 +14,17 @@ #include "gtest/gtest.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h" #include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h" #include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" #include "webrtc/test/test_suite.h" #include "webrtc/test/testsupport/fileutils.h" +#include "webrtc/test/testsupport/gtest_disable.h" namespace webrtc { + +namespace acm2 { namespace { bool CodecsEqual(const CodecInst& codec_a, const CodecInst& codec_b) { @@ -39,7 +43,7 @@ class AcmReceiverTest : public AudioPacketizationCallback, protected: AcmReceiverTest() : receiver_(new AcmReceiver), - acm_(AudioCodingModule::Create(0)), + acm_(new AudioCodingModuleImpl(0)), timestamp_(0), packet_sent_(false), last_packet_send_timestamp_(timestamp_), @@ -152,7 +156,7 @@ class AcmReceiverTest : public AudioPacketizationCallback, FrameType last_frame_type_; }; -TEST_F(AcmReceiverTest, AddCodecGetCodec) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(AddCodecGetCodec)) { // Add codec. for (int n = 0; n < ACMCodecDB::kNumCodecs; ++n) { if (n & 0x1) // Just add codecs with odd index. @@ -175,7 +179,7 @@ TEST_F(AcmReceiverTest, AddCodecGetCodec) { } } -TEST_F(AcmReceiverTest, AddCodecChangePayloadType) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(AddCodecChangePayloadType)) { CodecInst ref_codec; const int codec_id = ACMCodecDB::kPCMA; EXPECT_EQ(0, ACMCodecDB::Codec(codec_id, &ref_codec)); @@ -199,7 +203,7 @@ TEST_F(AcmReceiverTest, AddCodecChangePayloadType) { EXPECT_TRUE(CodecsEqual(test_codec, ref_codec)); } -TEST_F(AcmReceiverTest, AddCodecRemoveCodec) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(AddCodecRemoveCodec)) { CodecInst codec; const int codec_id = ACMCodecDB::kPCMA; EXPECT_EQ(0, ACMCodecDB::Codec(codec_id, &codec)); @@ -207,8 +211,8 @@ TEST_F(AcmReceiverTest, AddCodecRemoveCodec) { EXPECT_EQ(0, receiver_->AddCodec(codec_id, codec.pltype, codec.channels, NULL)); - // Remove non-existing codec, must fail. - EXPECT_EQ(-1, receiver_->RemoveCodec(payload_type + 1)); + // Remove non-existing codec should not fail. ACM1 legacy. + EXPECT_EQ(0, receiver_->RemoveCodec(payload_type + 1)); // Remove an existing codec. EXPECT_EQ(0, receiver_->RemoveCodec(payload_type)); @@ -217,7 +221,7 @@ TEST_F(AcmReceiverTest, AddCodecRemoveCodec) { EXPECT_EQ(-1, receiver_->DecoderByPayloadType(payload_type, &codec)); } -TEST_F(AcmReceiverTest, SampleRate) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(SampleRate)) { const int kCodecId[] = { ACMCodecDB::kISAC, ACMCodecDB::kISACSWB, ACMCodecDB::kISACFB, -1 // Terminator. @@ -241,7 +245,8 @@ TEST_F(AcmReceiverTest, SampleRate) { } // Changing playout mode to FAX should not change the background noise mode. 
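The test renames below wrap each test name in DISABLED_ON_ANDROID from webrtc/test/testsupport/gtest_disable.h. A helper of this shape conventionally prepends gtest's DISABLED_ prefix on the affected platform, roughly as follows (sketched from the usual pattern, not quoted from the header):

    #ifdef WEBRTC_ANDROID
    #define DISABLED_ON_ANDROID(test) DISABLED_##test
    #else
    #define DISABLED_ON_ANDROID(test) test
    #endif

    // Usage, as in the hunks below:
    //   TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(SampleRate)) { ... }

gtest skips any test whose name begins with DISABLED_ unless --gtest_also_run_disabled_tests is passed, so these tests still build on Android but do not run by default.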
-TEST_F(AcmReceiverTest, PlayoutModeAndBackgroundNoiseMode) { +TEST_F(AcmReceiverTest, + DISABLED_ON_ANDROID(PlayoutModeAndBackgroundNoiseMode)) { EXPECT_EQ(kBgnOn, receiver_->BackgroundNoiseModeForTest()); // Default receiver_->SetPlayoutMode(voice); @@ -269,7 +274,7 @@ TEST_F(AcmReceiverTest, PlayoutModeAndBackgroundNoiseMode) { EXPECT_EQ(kBgnOn, receiver_->BackgroundNoiseModeForTest()); } -TEST_F(AcmReceiverTest, PostdecodingVad) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(PostdecodingVad)) { receiver_->EnableVad(); EXPECT_TRUE(receiver_->vad_enabled()); @@ -297,7 +302,7 @@ TEST_F(AcmReceiverTest, PostdecodingVad) { EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_); } -TEST_F(AcmReceiverTest, FlushBuffer) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(FlushBuffer)) { const int id = ACMCodecDB::kISAC; EXPECT_EQ(0, receiver_->AddCodec(id, codecs_[id].pltype, codecs_[id].channels, NULL)); @@ -314,7 +319,7 @@ TEST_F(AcmReceiverTest, FlushBuffer) { ASSERT_EQ(0, statistics.currentBufferSize); } -TEST_F(AcmReceiverTest, PlayoutTimestamp) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(PlayoutTimestamp)) { const int id = ACMCodecDB::kPCM16Bwb; EXPECT_EQ(0, receiver_->AddCodec(id, codecs_[id].pltype, codecs_[id].channels, NULL)); @@ -346,7 +351,7 @@ TEST_F(AcmReceiverTest, PlayoutTimestamp) { } } -TEST_F(AcmReceiverTest, LastAudioCodec) { +TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(LastAudioCodec)) { const int kCodecId[] = { ACMCodecDB::kISAC, ACMCodecDB::kPCMA, ACMCodecDB::kISACSWB, ACMCodecDB::kPCM16Bswb32kHz, ACMCodecDB::kG722_1C_48, @@ -415,4 +420,6 @@ TEST_F(AcmReceiverTest, LastAudioCodec) { } } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.cc index f4a1f6f2a587..4f2c70cfd077 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.cc @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + ACMRED::ACMRED(int16_t codec_id) { codec_id_ = codec_id; } ACMRED::~ACMRED() {} @@ -47,4 +49,6 @@ void ACMRED::DestructEncoderSafe() { // RED has no instance } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.h index ab8d913fa86a..ac381709de99 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_red.h @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + class ACMRED : public ACMGenericCodec { public: explicit ACMRED(int16_t codec_id); @@ -35,6 +37,8 @@ class ACMRED : public ACMGenericCodec { void InternalDestructEncoderInst(void* ptr_inst); }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RED_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc index 13eed0ba6d11..3abe4f1ec461 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc @@ -19,6 +19,8 @@ namespace webrtc { +namespace acm2 { + ACMResampler::ACMResampler() : resampler_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) { } @@ -63,4 +65,6 @@ int ACMResampler::Resample10Msec(const 
int16_t* in_audio, return out_len / num_audio_channels; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.h index 8abb2f4f7ca3..e992955f5f3f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_resampler.h @@ -18,6 +18,8 @@ namespace webrtc { class CriticalSectionWrapper; +namespace acm2 { + class ACMResampler { public: ACMResampler(); @@ -35,6 +37,8 @@ class ACMResampler { CriticalSectionWrapper* resampler_crit_sect_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.cc index 829026549d5b..84a0592a400a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.cc @@ -21,6 +21,8 @@ namespace webrtc { +namespace acm2 { + #ifndef WEBRTC_CODEC_SPEEX ACMSPEEX::ACMSPEEX(int16_t /* codec_id */) : encoder_inst_ptr_(NULL), @@ -326,4 +328,6 @@ int16_t ACMSPEEX::SetComplMode(int16_t mode) { #endif +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.h index 2fac8fd2e965..f9cf78706840 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_speex.h @@ -19,6 +19,8 @@ struct SPEEX_decinst_t_; namespace webrtc { +namespace acm2 { + class ACMSPEEX : public ACMGenericCodec { public: explicit ACMSPEEX(int16_t codec_id); @@ -60,6 +62,8 @@ class ACMSPEEX : public ACMGenericCodec { uint16_t samples_in_20ms_audio_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_SPEEX_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc index 7acd49bdf5e7..60ed69cb29c9 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc @@ -19,6 +19,9 @@ namespace webrtc { +const char kLegacyAcmVersion[] = "acm1"; +const char kExperimentalAcmVersion[] = "acm2"; + // Create module AudioCodingModule* AudioCodingModule::Create(int id) { return new acm1::AudioCodingModuleImpl(id, Clock::GetRealTimeClock()); @@ -30,13 +33,13 @@ AudioCodingModule* AudioCodingModule::Create(int id, Clock* clock) { // Get number of supported codecs int AudioCodingModule::NumberOfCodecs() { - return ACMCodecDB::kNumCodecs; + return acm2::ACMCodecDB::kNumCodecs; } // Get supported codec parameters with id int AudioCodingModule::Codec(int list_id, CodecInst* codec) { // Get the codec settings for the codec with the given list ID - return ACMCodecDB::Codec(list_id, codec); + return acm2::ACMCodecDB::Codec(list_id, codec); } // Get supported codec parameters with name, frequency and number of channels. @@ -47,7 +50,8 @@ int AudioCodingModule::Codec(const char* payload_name, int codec_id; // Get the id of the codec from the database. 
- codec_id = ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels); + codec_id = acm2::ACMCodecDB::CodecId( + payload_name, sampling_freq_hz, channels); if (codec_id < 0) { // We couldn't find a matching codec, set the parameters to unacceptable // values and return. @@ -60,7 +64,7 @@ int AudioCodingModule::Codec(const char* payload_name, } // Get default codec settings. - ACMCodecDB::Codec(codec_id, codec); + acm2::ACMCodecDB::Codec(codec_id, codec); // Keep the number of channels from the function call. For most codecs it // will be the same value as in default codec settings, but not for all. @@ -73,14 +77,14 @@ int AudioCodingModule::Codec(const char* payload_name, int AudioCodingModule::Codec(const char* payload_name, int sampling_freq_hz, int channels) { - return ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels); + return acm2::ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels); } // Checks the validity of the parameters of the given codec bool AudioCodingModule::IsCodecValid(const CodecInst& codec) { int mirror_id; - int codec_number = ACMCodecDB::CodecNumber(codec, &mirror_id); + int codec_number = acm2::ACMCodecDB::CodecNumber(codec, &mirror_id); if (codec_number < 0) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, -1, @@ -97,7 +101,7 @@ AudioCodingModule* AudioCodingModuleFactory::Create(int id) const { } AudioCodingModule* NewAudioCodingModuleFactory::Create(int id) const { - return new AudioCodingModuleImpl(id); + return new acm2::AudioCodingModuleImpl(id); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi index 1ffb7ae51759..35b72f97b1fd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi @@ -21,11 +21,13 @@ 'include_dirs': [ '../interface', '../../../interface', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ '../interface', '../../../interface', + '<(webrtc_root)', ], }, 'sources': [ @@ -71,6 +73,8 @@ 'audio_coding_module.cc', 'audio_coding_module_impl.cc', 'audio_coding_module_impl.h', + 'call_statistics.cc', + 'call_statistics.h', 'initial_delay_manager.cc', 'initial_delay_manager.h', 'nack.cc', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc index 3a7d2718102f..4c64e07dd5ce 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc @@ -20,6 +20,7 @@ #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" #include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h" #include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h" +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/rw_lock_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" @@ -27,6 +28,8 @@ namespace webrtc { +namespace acm2 { + enum { kACMToneEnd = 999 }; @@ -138,7 +141,6 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id) receiver_initialized_(false), callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()), 
secondary_send_codec_inst_(), - secondary_encoder_(NULL), codec_timestamp_(expected_codec_ts_), first_10ms_data_(false) { @@ -1469,7 +1471,7 @@ int AudioCodingModuleImpl::SetVADSafe(bool enable_dtx, // If a send codec is registered, set VAD/DTX for the codec. if (HaveValidEncoder("SetVAD") && codecs_[current_send_codec_idx_]->SetVAD( - &enable_dtx, &enable_vad, &mode) < 0) { + &dtx_enabled_, &vad_enabled_, &vad_mode_) < 0) { // SetVAD failed. WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_, "SetVAD failed"); @@ -1536,8 +1538,7 @@ int AudioCodingModuleImpl::InitializeReceiverSafe() { // removing and registering a decoder we can achieve the effect of resetting. // Reset the decoder state. int AudioCodingModuleImpl::ResetDecoder() { - CriticalSectionScoped lock(acm_crit_sect_); - return -1; + return 0; } // Get current receive frequency. @@ -1975,4 +1976,15 @@ int AudioCodingModuleImpl::LeastRequiredDelayMs() const { return receiver_.LeastRequiredDelayMs(); } +const char* AudioCodingModuleImpl::Version() const { + return kExperimentalAcmVersion; +} + +void AudioCodingModuleImpl::GetDecodingCallStatistics( + AudioDecodingCallStats* call_stats) const { + receiver_.GetDecodingCallStatistics(call_stats); +} + +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h index 435c7aeab89a..bc4ea0f7a669 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h @@ -22,19 +22,21 @@ namespace webrtc { -class ACMDTMFDetection; -class ACMGenericCodec; class CriticalSectionWrapper; class RWLockWrapper; +namespace acm2 { + +class ACMDTMFDetection; +class ACMGenericCodec; + class AudioCodingModuleImpl : public AudioCodingModule { public: - // Constructor explicit AudioCodingModuleImpl(int id); - - // Destructor ~AudioCodingModuleImpl(); + virtual const char* Version() const; + // Change the unique identifier of this object. virtual int32_t ChangeUniqueId(const int32_t id); @@ -226,6 +228,8 @@ class AudioCodingModuleImpl : public AudioCodingModule { std::vector GetNackList(int round_trip_time_ms) const; + void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const; + private: int UnregisterReceiveCodecSafe(int payload_type); @@ -349,6 +353,8 @@ class AudioCodingModuleImpl : public AudioCodingModule { bool first_10ms_data_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_AUDIO_CODING_MODULE_IMPL_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics.cc new file mode 100644 index 000000000000..9153325afaf9 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" + +#include <assert.h> + +namespace webrtc { + +namespace acm2 { + +void CallStatistics::DecodedByNetEq(AudioFrame::SpeechType speech_type) { + ++decoding_stat_.calls_to_neteq; + switch (speech_type) { + case AudioFrame::kNormalSpeech: { + ++decoding_stat_.decoded_normal; + break; + } + case AudioFrame::kPLC: { + ++decoding_stat_.decoded_plc; + break; + } + case AudioFrame::kCNG: { + ++decoding_stat_.decoded_cng; + break; + } + case AudioFrame::kPLCCNG: { + ++decoding_stat_.decoded_plc_cng; + break; + } + case AudioFrame::kUndefined: { + // If the audio is decoded by NetEq, |kUndefined| is not an option. + assert(false); + } + } +} + +void CallStatistics::DecodedBySilenceGenerator() { + ++decoding_stat_.calls_to_silence_generator; +} + +const AudioDecodingCallStats& CallStatistics::GetDecodingStatistics() const { + return decoding_stat_; +} + +} // namespace acm2 + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics.h new file mode 100644 index 000000000000..2aece0ff400e --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CALL_STATISTICS_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CALL_STATISTICS_H_ + +#include "webrtc/common_types.h" +#include "webrtc/modules/interface/module_common_types.h" + +// +// This class is for bookkeeping of calls to ACM. It is not useful to log API +// calls which are supposed to be called every 10 ms, e.g. PlayoutData10Ms(); +// however, it is useful to know the number of such calls in a given time +// interval. The current implementation covers calls to PlayoutData10Ms() with +// detailed accounting of the decoded speech type. +// +// Thread Safety +// ============= +// Please note that this class is not thread safe. The class must be protected +// if different APIs are called from different threads. +// + +namespace webrtc { + +namespace acm2 { + +class CallStatistics { + public: + CallStatistics() {} + ~CallStatistics() {} + + // Call this method to indicate that NetEq engaged in decoding. |speech_type| + // is the audio type according to NetEq. + void DecodedByNetEq(AudioFrame::SpeechType speech_type); + + // Call this method to indicate that a decoding call resulted in generating + // silence, i.e. the call to NetEq is bypassed and the output audio is zero. + void DecodedBySilenceGenerator(); + + // Get statistics for decoding. The statistics include the number of calls to + // NetEq and to the silence generator, as well as the type of speech pulled + // off NetEq; cf. the declaration of AudioDecodingCallStats for a detailed + // description. + const AudioDecodingCallStats& GetDecodingStatistics() const; + + private: + // Reset the decoding statistics.
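// [Aside, not part of the patch] The switch in DecodedByNetEq() above
// increments calls_to_neteq and then exactly one per-type counter, so the
// per-type counters always partition the NetEq call count. A property-style
// check of that invariant, assuming only the CallStatistics API defined in
// this file (the helper function is hypothetical):
bool DecodingCountersConsistent(const webrtc::acm2::CallStatistics& stats) {
  const webrtc::AudioDecodingCallStats& s = stats.GetDecodingStatistics();
  return s.calls_to_neteq ==
         s.decoded_normal + s.decoded_plc + s.decoded_cng + s.decoded_plc_cng;
}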
+ void ResetDecodingStatistics(); + + AudioDecodingCallStats decoding_stat_; +}; + +} // namespace acm2 + +} // namespace webrtc + +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CALL_STATISTICS_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc new file mode 100644 index 000000000000..61aadd73cf15 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "gtest/gtest.h" +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" + +namespace webrtc { + +namespace acm2 { + +TEST(CallStatisticsTest, InitializedZero) { + CallStatistics call_stats; + AudioDecodingCallStats stats; + + stats = call_stats.GetDecodingStatistics(); + EXPECT_EQ(0, stats.calls_to_neteq); + EXPECT_EQ(0, stats.calls_to_silence_generator); + EXPECT_EQ(0, stats.decoded_normal); + EXPECT_EQ(0, stats.decoded_cng); + EXPECT_EQ(0, stats.decoded_plc); + EXPECT_EQ(0, stats.decoded_plc_cng); +} + +TEST(CallStatisticsTest, AllCalls) { + CallStatistics call_stats; + AudioDecodingCallStats stats; + + call_stats.DecodedBySilenceGenerator(); + call_stats.DecodedByNetEq(AudioFrame::kNormalSpeech); + call_stats.DecodedByNetEq(AudioFrame::kPLC); + call_stats.DecodedByNetEq(AudioFrame::kPLCCNG); + call_stats.DecodedByNetEq(AudioFrame::kCNG); + + stats = call_stats.GetDecodingStatistics(); + EXPECT_EQ(4, stats.calls_to_neteq); + EXPECT_EQ(1, stats.calls_to_silence_generator); + EXPECT_EQ(1, stats.decoded_normal); + EXPECT_EQ(1, stats.decoded_cng); + EXPECT_EQ(1, stats.decoded_plc); + EXPECT_EQ(1, stats.decoded_plc_cng); +} + +} // namespace acm2 + +} // namespace webrtc + + + diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc index ac79aa59c270..c2b218cb6cfe 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc @@ -12,6 +12,8 @@ namespace webrtc { +namespace acm2 { + InitialDelayManager::InitialDelayManager(int initial_delay_ms, int late_packet_threshold) : last_packet_type_(kUndefinedPacket), @@ -22,7 +24,12 @@ InitialDelayManager::InitialDelayManager(int initial_delay_ms, buffered_audio_ms_(0), buffering_(true), playout_timestamp_(0), - late_packet_threshold_(late_packet_threshold) { + last_packet_rtp_info_.header.payloadType = kInvalidPayloadType; + last_packet_rtp_info_.header.ssrc = 0; + last_packet_rtp_info_.header.sequenceNumber = 0; + last_packet_rtp_info_.header.timestamp = 0; +} void InitialDelayManager::UpdateLastReceivedPacket( const WebRtcRTPHeader& rtp_info, @@ -53,7 +60,9 @@ void InitialDelayManager::UpdateLastReceivedPacket( return; } - if (new_codec) { + // Record and set the variables below if this is either a new codec or the + // very first received packet.
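// [Aside, not part of the patch] A sketch of why the constructor above now
// seeds last_packet_rtp_info_ with kInvalidPayloadType: the sentinel is a
// payload type no real packet carries (per its name), so the very first
// UpdateLastReceivedPacket() call is forced through the same reset path as a
// codec change. The constructor arguments here are hypothetical values:
void SketchFirstPacketPath() {
  webrtc::acm2::InitialDelayManager manager(/* initial_delay_ms= */ 200,
                                            /* late_packet_threshold= */ 5);
  // Until the first packet arrives, the stored header keeps the sentinel, so
  // the condition below is true and timestamp_step_ is restarted even when
  // |new_codec| is false.
  (void)manager;
}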
+ if (new_codec || + last_packet_rtp_info_.header.payloadType == kInvalidPayloadType) { timestamp_step_ = 0; if (type == kAudioPacket) audio_payload_type_ = rtp_info.header.payloadType; @@ -220,4 +229,6 @@ void InitialDelayManager::UpdatePlayoutTimestamp( initial_delay_ms_ * sample_rate_hz / 1000); } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h index da08f8bd873c..3c5ba3c0139a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h @@ -16,6 +16,8 @@ namespace webrtc { +namespace acm2 { + class InitialDelayManager { public: enum PacketType { @@ -110,6 +112,8 @@ class InitialDelayManager { const int late_packet_threshold_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_INITIAL_DELAY_MANAGER_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc index 7e3bda5b5016..1e129f37e90e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc @@ -15,6 +15,8 @@ namespace webrtc { +namespace acm2 { + namespace { const uint8_t kAudioPayloadType = 0; @@ -368,4 +370,6 @@ TEST_F(InitialDelayManagerTest, BufferingAudio) { EXPECT_FALSE(manager_->buffering()); } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.cc index e26ad611f769..7265fe63c55a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.cc @@ -19,6 +19,8 @@ namespace webrtc { +namespace acm2 { + namespace { const int kDefaultSampleRateKhz = 48; @@ -222,4 +224,6 @@ std::vector Nack::GetNackList(int round_trip_time_ms) const { return sequence_numbers; } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.h index 490c038187d0..380932719923 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack.h @@ -49,6 +49,8 @@ // namespace webrtc { +namespace acm2 { + class Nack { public: // A limit for the size of the NACK list. 
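// [Aside, not part of the patch] The context line above refers to the cap on
// the NACK list; the acm1 copy of this header, removed later in this patch,
// defines it as kNackListSizeLimit = 500 with the note "10 seconds for 20 ms
// frame packets". The arithmetic behind that note, as a sketch:
const int kFrameMs = 20;                              // One packet per 20 ms.
const int kPacketsPerSecond = 1000 / kFrameMs;        // 50 packets per second.
const int kSecondsCovered = 500 / kPacketsPerSecond;  // 500 entries == 10 s.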
@@ -204,6 +206,8 @@ class Nack { size_t max_nack_list_size_; }; +} // namespace acm2 + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_NACK_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc index b047fd6d0189..8011d8856c09 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc @@ -21,6 +21,8 @@ namespace webrtc { +namespace acm2 { + namespace { const int kNackThreshold = 3; @@ -479,4 +481,6 @@ TEST(NackTest, RoudTripTimeIsApplied) { EXPECT_EQ(5, nack_list[1]); } +} // namespace acm2 + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/interface/audio_coding_module.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/interface/audio_coding_module.h index 2f4776d7546b..db45addde227 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/interface/audio_coding_module.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/interface/audio_coding_module.h @@ -73,6 +73,10 @@ class ACMVQMonCallback { const uint16_t delayMS) = 0; // average delay in ms }; +// Version string for testing, to distinguish instances of ACM1 from ACM2. +extern const char kLegacyAcmVersion[]; +extern const char kExperimentalAcmVersion[]; + class AudioCodingModule: public Module { protected: AudioCodingModule() {} @@ -174,6 +178,11 @@ class AudioCodingModule: public Module { // static bool IsCodecValid(const CodecInst& codec); + // Returns the version of ACM. This facilitates distinguishing instances of + // ACM1 from ACM2 while testing. This API will be removed when ACM1 is + // completely removed. + virtual const char* Version() const = 0; + /////////////////////////////////////////////////////////////////////////// // Sender // @@ -922,6 +931,9 @@ class AudioCodingModule: public Module { // is returned. // virtual std::vector GetNackList(int round_trip_time_ms) const = 0; + + virtual void GetDecodingCallStatistics( + AudioDecodingCallStats* call_stats) const = 0; }; struct AudioCodingModuleFactory { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc index fe0a13ca4cd8..4e53b873a1f0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc @@ -170,6 +170,7 @@ int32_t ACMGenericCodec::Add10MsDataSafe(const uint32_t timestamp, memmove(in_timestamp_, in_timestamp_ + missed_10ms_blocks, (in_timestamp_ix_write_ - missed_10ms_blocks) * sizeof(uint32_t)); in_timestamp_ix_write_ -= missed_10ms_blocks; + assert(in_timestamp_ix_write_ >= 0); in_timestamp_[in_timestamp_ix_write_] = timestamp; in_timestamp_ix_write_++; @@ -351,7 +352,7 @@ int16_t ACMGenericCodec::Encode(uint8_t* bitstream, (in_timestamp_ix_write_ - num_10ms_blocks) * sizeof(int32_t)); } in_timestamp_ix_write_ -= num_10ms_blocks; - + assert(in_timestamp_ix_write_ >= 0); // Remove encoded audio and move next audio to be encoded to the beginning // of the buffer. Accordingly, adjust the read and write indices. 
if (in_audio_ix_read_ < in_audio_ix_write_) { @@ -359,6 +360,7 @@ int16_t ACMGenericCodec::Encode(uint8_t* bitstream, (in_audio_ix_write_ - in_audio_ix_read_) * sizeof(int16_t)); } in_audio_ix_write_ -= in_audio_ix_read_; + assert(in_timestamp_ix_write_ >= 0); in_audio_ix_read_ = 0; last_encoded_timestamp_ = *timestamp; return (status < 0) ? (-1) : (*bitstream_len_byte); @@ -574,20 +576,23 @@ int16_t ACMGenericCodec::InitEncoderSafe(WebRtcACMCodecParams* codec_params, if (in_audio_ == NULL) { return -1; } - memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t)); } if (in_timestamp_ == NULL) { in_timestamp_ = new uint32_t[TIMESTAMP_BUFFER_SIZE_W32]; if (in_timestamp_ == NULL) { return -1; } - memset(in_timestamp_, 0, sizeof(uint32_t) * TIMESTAMP_BUFFER_SIZE_W32); } + // Fresh start for audio buffer. is_audio_buff_fresh_ = true; + memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t)); + memset(in_timestamp_, 0, sizeof(uint32_t) * TIMESTAMP_BUFFER_SIZE_W32); + in_audio_ix_write_ = 0; + in_audio_ix_read_ = 0; + in_timestamp_ix_write_ = 0; } status = SetVADSafe(&codec_params->enable_dtx, &codec_params->enable_vad, &codec_params->vad_mode); - return status; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.h index d6403f5c6c84..c1f9cdc554a0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_generic_codec.h @@ -27,6 +27,7 @@ namespace webrtc { // forward declaration struct CodecInst; +struct WebRtcACMCodecParams; namespace acm1 { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi index 88e70d461cfe..d8be56d5ba2c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi @@ -56,11 +56,13 @@ 'include_dirs': [ '../interface', '../../../interface', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ '../interface', '../../../interface', + '<(webrtc_root)', ], }, 'sources': [ @@ -84,54 +86,52 @@ 'acm_resampler.h', 'audio_coding_module_impl.cc', 'audio_coding_module_impl.h', - 'nack.cc', - 'nack.h', ], - 'conditions': [ - ['include_opus==1', { - 'sources': [ - 'acm_opus.cc', - 'acm_opus.h', + 'conditions': [ + ['include_opus==1', { + 'sources': [ + 'acm_opus.cc', + 'acm_opus.h', + ], + }], + ['include_g711==1', { + 'sources': [ + 'acm_pcma.cc', + 'acm_pcma.h', + 'acm_pcmu.cc', + 'acm_pcmu.h', + ], + }], + ['include_g722==1', { + 'sources': [ + 'acm_g722.cc', + 'acm_g722.h', + 'acm_g7221.cc', + 'acm_g7221.h', + 'acm_g7221c.cc', + 'acm_g7221c.h', + ], + }], + ['include_ilbc==1', { + 'sources': [ + 'acm_ilbc.cc', + 'acm_ilbc.h', + ], + }], + ['include_isac==1', { + 'sources': [ + 'acm_isac.cc', + 'acm_isac.h', + 'acm_isac_macros.h', + ], + }], + ['include_pcm16b==1', { + 'sources': [ + 'acm_pcm16b.cc', + 'acm_pcm16b.h', + ], + }], ], - }], - ['include_g711==1', { - 'sources': [ - 'acm_pcma.cc', - 'acm_pcma.h', - 'acm_pcmu.cc', - 'acm_pcmu.h', - ], - }], - ['include_g722==1', { - 'sources': [ - 'acm_g722.cc', - 'acm_g722.h', - 'acm_g7221.cc', - 'acm_g7221.h', - 'acm_g7221c.cc', - 'acm_g7221c.h', - ], - }], - ['include_ilbc==1', { - 'sources': [ - 'acm_ilbc.cc', - 'acm_ilbc.h', - ], - }], 
- ['include_isac==1', { - 'sources': [ - 'acm_isac.cc', - 'acm_isac.h', - 'acm_isac_macros.h', - ], - }], - ['include_pcm16b==1', { - 'sources': [ - 'acm_pcm16b.cc', - 'acm_pcm16b.h', - ], - }], - ], }, ], 'conditions': [ @@ -143,7 +143,7 @@ 'dependencies': [ 'audio_coding_module', '<(DEPTH)/testing/gtest.gyp:gtest', - '<(webrtc_root)/test/test.gyp:test_support_main', + '<(webrtc_root)/test/test.gyp:test_support', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', ], @@ -151,6 +151,7 @@ '../test/delay_test.cc', '../test/Channel.cc', '../test/PCMFile.cc', + '../test/utility.cc', ], }, # delay_test { @@ -159,7 +160,7 @@ 'dependencies': [ 'audio_coding_module', '<(DEPTH)/testing/gtest.gyp:gtest', - '<(webrtc_root)/test/test.gyp:test_support_main', + '<(webrtc_root)/test/test.gyp:test_support', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', ], diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc index f5f84505fe38..556f530ecfd5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc @@ -18,10 +18,11 @@ #include "webrtc/engine_configurations.h" #include "webrtc/modules/audio_coding/main/source/acm_codec_database.h" #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" #include "webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h" #include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h" #include "webrtc/modules/audio_coding/main/source/acm_resampler.h" -#include "webrtc/modules/audio_coding/main/source/nack.h" +#include "webrtc/modules/audio_coding/main/acm2/nack.h" #include "webrtc/system_wrappers/interface/clock.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/logging.h" @@ -154,7 +155,6 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(const int32_t id, Clock* clock) last_detected_tone_(kACMToneEnd), callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()), secondary_send_codec_inst_(), - secondary_encoder_(NULL), initial_delay_ms_(0), num_packets_accumulated_(0), num_bytes_accumulated_(0), @@ -2274,6 +2274,9 @@ int32_t AudioCodingModuleImpl::PlayoutData10Ms( { CriticalSectionScoped lock(acm_crit_sect_); + // Update call statistics. + call_stats_.DecodedByNetEq(audio_frame->speech_type_); + if (update_nack) { assert(nack_.get()); nack_->UpdateLastDecodedPacket(decoded_seq_num, decoded_timestamp); @@ -2880,6 +2883,9 @@ bool AudioCodingModuleImpl::GetSilence(int desired_sample_rate_hz, return false; } + // Record call to silence generator. + call_stats_.DecodedBySilenceGenerator(); + // We stop accumulating packets, if the number of packets or the total size // exceeds a threshold. int max_num_packets; @@ -3002,12 +3008,13 @@ int AudioCodingModuleImpl::LeastRequiredDelayMs() const { int AudioCodingModuleImpl::EnableNack(size_t max_nack_list_size) { // Don't do anything if |max_nack_list_size| is out of range. 
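// [Aside, not part of the patch] Caller-side view of the range check that
// follows: list sizes are accepted only in (0, acm2::Nack::kNackListSizeLimit],
// i.e. 1 through 500 entries. A sketch, assuming an AudioCodingModule* that
// exposes EnableNack() as this implementation does and that it returns 0 on
// success (the success value is not shown in this hunk):
#include <assert.h>

void SketchEnableNackRange(webrtc::AudioCodingModule* acm) {
  assert(acm->EnableNack(0) == -1);    // Zero entries: out of range.
  assert(acm->EnableNack(501) == -1);  // Above the 500-entry limit.
  assert(acm->EnableNack(250) == 0);   // In range: NACK enabled.
}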
- if (max_nack_list_size == 0 || max_nack_list_size > Nack::kNackListSizeLimit) + if (max_nack_list_size == 0 || + max_nack_list_size > acm2::Nack::kNackListSizeLimit) return -1; CriticalSectionScoped lock(acm_crit_sect_); if (!nack_enabled_) { - nack_.reset(Nack::Create(kNackThresholdPackets)); + nack_.reset(acm2::Nack::Create(kNackThresholdPackets)); nack_enabled_ = true; // Sampling rate might need to be updated if we change from disable to @@ -3026,6 +3033,16 @@ void AudioCodingModuleImpl::DisableNack() { nack_enabled_ = false; } +const char* AudioCodingModuleImpl::Version() const { + return kLegacyAcmVersion; +} + +void AudioCodingModuleImpl::GetDecodingCallStatistics( + AudioDecodingCallStats* call_stats) const { + CriticalSectionScoped lock(acm_crit_sect_); + *call_stats = call_stats_.GetDecodingStatistics(); +} + } // namespace acm1 } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h index b63ae09da20c..f0b22f11465a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h @@ -19,6 +19,7 @@ #include "webrtc/modules/audio_coding/main/source/acm_codec_database.h" #include "webrtc/modules/audio_coding/main/source/acm_neteq.h" #include "webrtc/modules/audio_coding/main/source/acm_resampler.h" +#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" namespace webrtc { @@ -29,20 +30,22 @@ class CriticalSectionWrapper; class RWLockWrapper; class Clock; +namespace acm2 { +class Nack; +} + namespace acm1 { class ACMDTMFDetection; class ACMGenericCodec; -class Nack; class AudioCodingModuleImpl : public AudioCodingModule { public: - // Constructor AudioCodingModuleImpl(const int32_t id, Clock* clock); - - // Destructor ~AudioCodingModuleImpl(); + virtual const char* Version() const; + // Change the unique identifier of this object. virtual int32_t ChangeUniqueId(const int32_t id); @@ -301,6 +304,8 @@ class AudioCodingModuleImpl : public AudioCodingModule { // Disable NACK. void DisableNack(); + void GetDecodingCallStatistics(AudioDecodingCallStats* call_stats) const; + private: // Change required states after starting to receive the codec corresponding // to |index|. @@ -437,8 +442,10 @@ class AudioCodingModuleImpl : public AudioCodingModule { int64_t last_receive_timestamp_; Clock* clock_; - scoped_ptr nack_; + scoped_ptr nack_; bool nack_enabled_; + + acm2::CallStatistics call_stats_; }; } // namespace acm1 diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack.cc deleted file mode 100644 index 4ca260ddc2f6..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack.cc +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/modules/audio_coding/main/source/nack.h" - -#include // For assert. - -#include // For std::max. 
- -#include "webrtc/modules/interface/module_common_types.h" -#include "webrtc/system_wrappers/interface/logging.h" - -namespace webrtc { - -namespace acm1 { - -namespace { - -const int kDefaultSampleRateKhz = 48; -const int kDefaultPacketSizeMs = 20; - -} // namespace - -Nack::Nack(int nack_threshold_packets) - : nack_threshold_packets_(nack_threshold_packets), - sequence_num_last_received_rtp_(0), - timestamp_last_received_rtp_(0), - any_rtp_received_(false), - sequence_num_last_decoded_rtp_(0), - timestamp_last_decoded_rtp_(0), - any_rtp_decoded_(false), - sample_rate_khz_(kDefaultSampleRateKhz), - samples_per_packet_(sample_rate_khz_ * kDefaultPacketSizeMs), - max_nack_list_size_(kNackListSizeLimit) {} - -Nack* Nack::Create(int nack_threshold_packets) { - return new Nack(nack_threshold_packets); -} - -void Nack::UpdateSampleRate(int sample_rate_hz) { - assert(sample_rate_hz > 0); - sample_rate_khz_ = sample_rate_hz / 1000; -} - -void Nack::UpdateLastReceivedPacket(uint16_t sequence_number, - uint32_t timestamp) { - // Just record the value of sequence number and timestamp if this is the - // first packet. - if (!any_rtp_received_) { - sequence_num_last_received_rtp_ = sequence_number; - timestamp_last_received_rtp_ = timestamp; - any_rtp_received_ = true; - // If no packet is decoded, to have a reasonable estimate of time-to-play - // use the given values. - if (!any_rtp_decoded_) { - sequence_num_last_decoded_rtp_ = sequence_number; - timestamp_last_decoded_rtp_ = timestamp; - } - return; - } - - if (sequence_number == sequence_num_last_received_rtp_) - return; - - // Received RTP should not be in the list. - nack_list_.erase(sequence_number); - - // If this is an old sequence number, no more action is required, return. - if (IsNewerSequenceNumber(sequence_num_last_received_rtp_, sequence_number)) - return; - - UpdateSamplesPerPacket(sequence_number, timestamp); - - UpdateList(sequence_number); - - sequence_num_last_received_rtp_ = sequence_number; - timestamp_last_received_rtp_ = timestamp; - LimitNackListSize(); -} - -void Nack::UpdateSamplesPerPacket(uint16_t sequence_number_current_received_rtp, - uint32_t timestamp_current_received_rtp) { - uint32_t timestamp_increase = timestamp_current_received_rtp - - timestamp_last_received_rtp_; - uint16_t sequence_num_increase = sequence_number_current_received_rtp - - sequence_num_last_received_rtp_; - - samples_per_packet_ = timestamp_increase / sequence_num_increase; -} - -void Nack::UpdateList(uint16_t sequence_number_current_received_rtp) { - // Some of the packets which were considered late, now are considered missing. 
- ChangeFromLateToMissing(sequence_number_current_received_rtp); - - if (IsNewerSequenceNumber(sequence_number_current_received_rtp, - sequence_num_last_received_rtp_ + 1)) - AddToList(sequence_number_current_received_rtp); -} - -void Nack::ChangeFromLateToMissing( - uint16_t sequence_number_current_received_rtp) { - NackList::const_iterator lower_bound = nack_list_.lower_bound( - static_cast(sequence_number_current_received_rtp - - nack_threshold_packets_)); - - for (NackList::iterator it = nack_list_.begin(); it != lower_bound; ++it) - it->second.is_missing = true; -} - -uint32_t Nack::EstimateTimestamp(uint16_t sequence_num) { - uint16_t sequence_num_diff = sequence_num - sequence_num_last_received_rtp_; - return sequence_num_diff * samples_per_packet_ + timestamp_last_received_rtp_; -} - -void Nack::AddToList(uint16_t sequence_number_current_received_rtp) { - assert(!any_rtp_decoded_ || IsNewerSequenceNumber( - sequence_number_current_received_rtp, sequence_num_last_decoded_rtp_)); - - // Packets with sequence numbers older than |upper_bound_missing| are - // considered missing, and the rest are considered late. - uint16_t upper_bound_missing = sequence_number_current_received_rtp - - nack_threshold_packets_; - - for (uint16_t n = sequence_num_last_received_rtp_ + 1; - IsNewerSequenceNumber(sequence_number_current_received_rtp, n); ++n) { - bool is_missing = IsNewerSequenceNumber(upper_bound_missing, n); - uint32_t timestamp = EstimateTimestamp(n); - NackElement nack_element(TimeToPlay(timestamp), timestamp, is_missing); - nack_list_.insert(nack_list_.end(), std::make_pair(n, nack_element)); - } -} - -void Nack::UpdateEstimatedPlayoutTimeBy10ms() { - while (!nack_list_.empty() && - nack_list_.begin()->second.time_to_play_ms <= 10) - nack_list_.erase(nack_list_.begin()); - - for (NackList::iterator it = nack_list_.begin(); it != nack_list_.end(); ++it) - it->second.time_to_play_ms -= 10; -} - -void Nack::UpdateLastDecodedPacket(uint16_t sequence_number, - uint32_t timestamp) { - if (IsNewerSequenceNumber(sequence_number, sequence_num_last_decoded_rtp_) || - !any_rtp_decoded_) { - sequence_num_last_decoded_rtp_ = sequence_number; - timestamp_last_decoded_rtp_ = timestamp; - // Packets in the list with sequence numbers less than the - // sequence number of the decoded RTP should be removed from the lists. - // They will be discarded by the jitter buffer if they arrive. - nack_list_.erase(nack_list_.begin(), nack_list_.upper_bound( - sequence_num_last_decoded_rtp_)); - - // Update estimated time-to-play. - for (NackList::iterator it = nack_list_.begin(); it != nack_list_.end(); - ++it) - it->second.time_to_play_ms = TimeToPlay(it->second.estimated_timestamp); - } else { - assert(sequence_number == sequence_num_last_decoded_rtp_); - - // Same sequence number as before. 10 ms is elapsed, update estimations for - // time-to-play. - UpdateEstimatedPlayoutTimeBy10ms(); - - // Update timestamp for better estimate of time-to-play, for packets which - // are added to NACK list later on. 
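// [Aside, not part of the patch] Worked numbers for the time-to-play
// arithmetic used above, borrowing the constants from nack_unittest.cc
// (16 kHz audio, 30 ms packets, hence a timestamp increment of 480 per
// packet):
const int kSampleRateKhz = 16;      // 16000 Hz / 1000, as in the tests.
const int kSamplesPerPacket = 480;  // 30 ms * 16 samples per ms.
// A missing packet estimated three packets past the last decoded timestamp
// is due in (3 * 480) / 16 = 90 ms, exactly the division TimeToPlay() does.
const int kTimeToPlayMs = (3 * kSamplesPerPacket) / kSampleRateKhz;  // == 90.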
- timestamp_last_decoded_rtp_ += sample_rate_khz_ * 10; - } - any_rtp_decoded_ = true; -} - -Nack::NackList Nack::GetNackList() const { - return nack_list_; -} - -void Nack::Reset() { - nack_list_.clear(); - - sequence_num_last_received_rtp_ = 0; - timestamp_last_received_rtp_ = 0; - any_rtp_received_ = false; - sequence_num_last_decoded_rtp_ = 0; - timestamp_last_decoded_rtp_ = 0; - any_rtp_decoded_ = false; - sample_rate_khz_ = kDefaultSampleRateKhz; - samples_per_packet_ = sample_rate_khz_ * kDefaultPacketSizeMs; -} - -int Nack::SetMaxNackListSize(size_t max_nack_list_size) { - if (max_nack_list_size == 0 || max_nack_list_size > kNackListSizeLimit) - return -1; - max_nack_list_size_ = max_nack_list_size; - LimitNackListSize(); - return 0; -} - -void Nack::LimitNackListSize() { - uint16_t limit = sequence_num_last_received_rtp_ - - static_cast(max_nack_list_size_) - 1; - nack_list_.erase(nack_list_.begin(), nack_list_.upper_bound(limit)); -} - -int Nack::TimeToPlay(uint32_t timestamp) const { - uint32_t timestamp_increase = timestamp - timestamp_last_decoded_rtp_; - return timestamp_increase / sample_rate_khz_; -} - -// We don't erase elements with time-to-play shorter than round-trip-time. -std::vector Nack::GetNackList(int round_trip_time_ms) const { - std::vector sequence_numbers; - for (NackList::const_iterator it = nack_list_.begin(); it != nack_list_.end(); - ++it) { - if (it->second.is_missing && - it->second.time_to_play_ms > round_trip_time_ms) - sequence_numbers.push_back(it->first); - } - return sequence_numbers; -} - -} // namespace acm1 - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack.h deleted file mode 100644 index 9cea15d1a21f..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack.h +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_NACK_H_ -#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_NACK_H_ - -#include -#include - -#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" -#include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "webrtc/test/testsupport/gtest_prod_util.h" - -// -// The Nack class keeps track of the lost packets, an estimate of time-to-play -// for each packet is also given. -// -// Every time a packet is pushed into NetEq, LastReceivedPacket() has to be -// called to update the NACK list. -// -// Every time 10ms audio is pulled from NetEq LastDecodedPacket() should be -// called, and time-to-play is updated at that moment. -// -// If packet N is received, any packet prior to |N - NackThreshold| which is not -// arrived is considered lost, and should be labeled as "missing" (the size of -// the list might be limited and older packet eliminated from the list). Packets -// |N - NackThreshold|, |N - NackThreshold + 1|, ..., |N - 1| are considered -// "late." A "late" packet with sequence number K is changed to "missing" any -// time a packet with sequence number newer than |K + NackList| is arrived. 
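// [Aside, not part of the patch] The paragraph above defines the late versus
// missing split. A simplified sketch of that rule; it ignores the 16-bit
// sequence-number wrap-around that the real code handles with
// IsNewerSequenceNumber():
enum LossState { kLate, kMissing };

LossState Classify(uint16_t newest_received, uint16_t unreceived,
                   int nack_threshold) {
  // Packets older than |newest_received - nack_threshold| are missing; the
  // unreceived ones inside the threshold window are merely late.
  return (newest_received - unreceived > nack_threshold) ? kMissing : kLate;
}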
-// -// The Nack class has to know about the sample rate of the packets to compute -// time-to-play. So sample rate should be set as soon as the first packet is -// received. If there is a change in the receive codec (sender changes codec) -// then Nack should be reset. This is because NetEQ would flush its buffer and -// re-transmission is meaning less for old packet. Therefore, in that case, -// after reset the sampling rate has to be updated. -// -// Thread Safety -// ============= -// Please note that this class in not thread safe. The class must be protected -// if different APIs are called from different threads. -// -namespace webrtc { - -namespace acm1 { - -class Nack { - public: - // A limit for the size of the NACK list. - static const size_t kNackListSizeLimit = 500; // 10 seconds for 20 ms frame - // packets. - // Factory method. - static Nack* Create(int nack_threshold_packets); - - ~Nack() {} - - // Set a maximum for the size of the NACK list. If the last received packet - // has sequence number of N, then NACK list will not contain any element - // with sequence number earlier than N - |max_nack_list_size|. - // - // The largest maximum size is defined by |kNackListSizeLimit| - int SetMaxNackListSize(size_t max_nack_list_size); - - // Set the sampling rate. - // - // If associated sampling rate of the received packets is changed, call this - // function to update sampling rate. Note that if there is any change in - // received codec then NetEq will flush its buffer and NACK has to be reset. - // After Reset() is called sampling rate has to be set. - void UpdateSampleRate(int sample_rate_hz); - - // Update the sequence number and the timestamp of the last decoded RTP. This - // API should be called every time 10 ms audio is pulled from NetEq. - void UpdateLastDecodedPacket(uint16_t sequence_number, uint32_t timestamp); - - // Update the sequence number and the timestamp of the last received RTP. This - // API should be called every time a packet pushed into ACM. - void UpdateLastReceivedPacket(uint16_t sequence_number, uint32_t timestamp); - - // Get a list of "missing" packets which have expected time-to-play larger - // than the given round-trip-time (in milliseconds). - // Note: Late packets are not included. - std::vector GetNackList(int round_trip_time_ms) const; - - // Reset to default values. The NACK list is cleared. - // |nack_threshold_packets_| & |max_nack_list_size_| preserve their values. - void Reset(); - - private: - // This test need to access the private method GetNackList(). - FRIEND_TEST_ALL_PREFIXES(NackTest, EstimateTimestampAndTimeToPlay); - - struct NackElement { - NackElement(int initial_time_to_play_ms, - uint32_t initial_timestamp, - bool missing) - : time_to_play_ms(initial_time_to_play_ms), - estimated_timestamp(initial_timestamp), - is_missing(missing) {} - - // Estimated time (ms) left for this packet to be decoded. This estimate is - // updated every time jitter buffer decodes a packet. - int time_to_play_ms; - - // A guess about the timestamp of the missing packet, it is used for - // estimation of |time_to_play_ms|. The estimate might be slightly wrong if - // there has been frame-size change since the last received packet and the - // missing packet. However, the risk of this is low, and in case of such - // errors, there will be a minor misestimation in time-to-play of missing - // packets. This will have a very minor effect on NACK performance. - uint32_t estimated_timestamp; - - // True if the packet is considered missing. 
Otherwise indicates packet is - // late. - bool is_missing; - }; - - class NackListCompare { - public: - bool operator() (uint16_t sequence_number_old, - uint16_t sequence_number_new) const { - return IsNewerSequenceNumber(sequence_number_new, sequence_number_old); - } - }; - - typedef std::map NackList; - - // Constructor. - explicit Nack(int nack_threshold_packets); - - // This API is used only for testing to assess whether time-to-play is - // computed correctly. - NackList GetNackList() const; - - // Given the |sequence_number_current_received_rtp| of currently received RTP, - // recognize packets which are not arrive and add to the list. - void AddToList(uint16_t sequence_number_current_received_rtp); - - // This function subtracts 10 ms of time-to-play for all packets in NACK list. - // This is called when 10 ms elapsed with no new RTP packet decoded. - void UpdateEstimatedPlayoutTimeBy10ms(); - - // Given the |sequence_number_current_received_rtp| and - // |timestamp_current_received_rtp| of currently received RTP update number - // of samples per packet. - void UpdateSamplesPerPacket(uint16_t sequence_number_current_received_rtp, - uint32_t timestamp_current_received_rtp); - - // Given the |sequence_number_current_received_rtp| of currently received RTP - // update the list. That is; some packets will change from late to missing, - // some packets are inserted as missing and some inserted as late. - void UpdateList(uint16_t sequence_number_current_received_rtp); - - // Packets which are considered late for too long (according to - // |nack_threshold_packets_|) are flagged as missing. - void ChangeFromLateToMissing(uint16_t sequence_number_current_received_rtp); - - // Packets which have sequence number older that - // |sequence_num_last_received_rtp_| - |max_nack_list_size_| are removed - // from the NACK list. - void LimitNackListSize(); - - // Estimate timestamp of a missing packet given its sequence number. - uint32_t EstimateTimestamp(uint16_t sequence_number); - - // Compute time-to-play given a timestamp. - int TimeToPlay(uint32_t timestamp) const; - - // If packet N is arrived, any packet prior to N - |nack_threshold_packets_| - // which is not arrived is considered missing, and should be in NACK list. - // Also any packet in the range of N-1 and N - |nack_threshold_packets_|, - // exclusive, which is not arrived is considered late, and should should be - // in the list of late packets. - const int nack_threshold_packets_; - - // Valid if a packet is received. - uint16_t sequence_num_last_received_rtp_; - uint32_t timestamp_last_received_rtp_; - bool any_rtp_received_; // If any packet received. - - // Valid if a packet is decoded. - uint16_t sequence_num_last_decoded_rtp_; - uint32_t timestamp_last_decoded_rtp_; - bool any_rtp_decoded_; // If any packet decoded. - - int sample_rate_khz_; // Sample rate in kHz. - - // Number of samples per packet. We update this every time we receive a - // packet, not only for consecutive packets. - int samples_per_packet_; - - // A list of missing packets to be retransmitted. Components of the list - // contain the sequence number of missing packets and the estimated time that - // each pack is going to be played out. - NackList nack_list_; - - // NACK list will not keep track of missing packets prior to - // |sequence_num_last_received_rtp_| - |max_nack_list_size_|. 
- size_t max_nack_list_size_; -}; - -} // namespace acm1 - -} // namespace webrtc - -#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_NACK_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack_unittest.cc deleted file mode 100644 index 811aca4fc812..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/nack_unittest.cc +++ /dev/null @@ -1,487 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/modules/audio_coding/main/source/nack.h" - -#include - -#include -#include - -#include "gtest/gtest.h" -#include "webrtc/typedefs.h" -#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" -#include "webrtc/system_wrappers/interface/scoped_ptr.h" - -namespace webrtc { - -namespace acm1 { - -namespace { - -const int kNackThreshold = 3; -const int kSampleRateHz = 16000; -const int kPacketSizeMs = 30; -const uint32_t kTimestampIncrement = 480; // 30 ms. -const int kShortRoundTripTimeMs = 1; - -bool IsNackListCorrect(const std::vector& nack_list, - const uint16_t* lost_sequence_numbers, - size_t num_lost_packets) { - if (nack_list.size() != num_lost_packets) - return false; - - if (num_lost_packets == 0) - return true; - - for (size_t k = 0; k < nack_list.size(); ++k) { - int seq_num = nack_list[k]; - bool seq_num_matched = false; - for (size_t n = 0; n < num_lost_packets; ++n) { - if (seq_num == lost_sequence_numbers[n]) { - seq_num_matched = true; - break; - } - } - if (!seq_num_matched) - return false; - } - return true; -} - -} // namespace - -TEST(NackTest, EmptyListWhenNoPacketLoss) { - scoped_ptr nack(Nack::Create(kNackThreshold)); - nack->UpdateSampleRate(kSampleRateHz); - - int seq_num = 1; - uint32_t timestamp = 0; - - std::vector nack_list; - for (int n = 0; n < 100; n++) { - nack->UpdateLastReceivedPacket(seq_num, timestamp); - nack_list = nack->GetNackList(kShortRoundTripTimeMs); - seq_num++; - timestamp += kTimestampIncrement; - nack_list = nack->GetNackList(kShortRoundTripTimeMs); - EXPECT_TRUE(nack_list.empty()); - } -} - -TEST(NackTest, NoNackIfReorderWithinNackThreshold) { - scoped_ptr nack(Nack::Create(kNackThreshold)); - nack->UpdateSampleRate(kSampleRateHz); - - int seq_num = 1; - uint32_t timestamp = 0; - std::vector nack_list; - - nack->UpdateLastReceivedPacket(seq_num, timestamp); - nack_list = nack->GetNackList(kShortRoundTripTimeMs); - EXPECT_TRUE(nack_list.empty()); - int num_late_packets = kNackThreshold + 1; - - // Push in reverse order - while (num_late_packets > 0) { - nack->UpdateLastReceivedPacket(seq_num + num_late_packets, timestamp + - num_late_packets * kTimestampIncrement); - nack_list = nack->GetNackList(kShortRoundTripTimeMs); - EXPECT_TRUE(nack_list.empty()); - num_late_packets--; - } -} - -TEST(NackTest, LatePacketsMovedToNackThenNackListDoesNotChange) { - const uint16_t kSequenceNumberLostPackets[] = { 2, 3, 4, 5, 6, 7, 8, 9 }; - static const int kNumAllLostPackets = sizeof(kSequenceNumberLostPackets) / - sizeof(kSequenceNumberLostPackets[0]); - - for (int k = 0; k < 2; k++) { // Two iteration with/without wrap around. 
-    scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-    nack->UpdateSampleRate(kSampleRateHz);
-
-    uint16_t sequence_num_lost_packets[kNumAllLostPackets];
-    for (int n = 0; n < kNumAllLostPackets; n++) {
-      sequence_num_lost_packets[n] = kSequenceNumberLostPackets[n] + k *
-          65531;  // Have wrap-around in sequence numbers for |k == 1|.
-    }
-    uint16_t seq_num = sequence_num_lost_packets[0] - 1;
-
-    uint32_t timestamp = 0;
-    std::vector<uint16_t> nack_list;
-
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-    nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_TRUE(nack_list.empty());
-
-    seq_num = sequence_num_lost_packets[kNumAllLostPackets - 1] + 1;
-    timestamp += kTimestampIncrement * (kNumAllLostPackets + 1);
-    int num_lost_packets = std::max(0, kNumAllLostPackets - kNackThreshold);
-
-    for (int n = 0; n < kNackThreshold + 1; ++n) {
-      nack->UpdateLastReceivedPacket(seq_num, timestamp);
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(IsNackListCorrect(nack_list, sequence_num_lost_packets,
-                                    num_lost_packets));
-      seq_num++;
-      timestamp += kTimestampIncrement;
-      num_lost_packets++;
-    }
-
-    for (int n = 0; n < 100; ++n) {
-      nack->UpdateLastReceivedPacket(seq_num, timestamp);
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(IsNackListCorrect(nack_list, sequence_num_lost_packets,
-                                    kNumAllLostPackets));
-      seq_num++;
-      timestamp += kTimestampIncrement;
-    }
-  }
-}
-
-TEST(NackTest, ArrivedPacketsAreRemovedFromNackList) {
-  const uint16_t kSequenceNumberLostPackets[] = { 2, 3, 4, 5, 6, 7, 8, 9 };
-  static const int kNumAllLostPackets = sizeof(kSequenceNumberLostPackets) /
-      sizeof(kSequenceNumberLostPackets[0]);
-
-  for (int k = 0; k < 2; ++k) {  // Two iterations, with/without wrap-around.
-    scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-    nack->UpdateSampleRate(kSampleRateHz);
-
-    uint16_t sequence_num_lost_packets[kNumAllLostPackets];
-    for (int n = 0; n < kNumAllLostPackets; ++n) {
-      sequence_num_lost_packets[n] = kSequenceNumberLostPackets[n] + k *
-          65531;  // Wrap-around for |k == 1|.
-    }
-
-    uint16_t seq_num = sequence_num_lost_packets[0] - 1;
-    uint32_t timestamp = 0;
-
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-    std::vector<uint16_t> nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_TRUE(nack_list.empty());
-
-    size_t index_retransmitted_rtp = 0;
-    uint32_t timestamp_retransmitted_rtp = timestamp + kTimestampIncrement;
-
-    seq_num = sequence_num_lost_packets[kNumAllLostPackets - 1] + 1;
-    timestamp += kTimestampIncrement * (kNumAllLostPackets + 1);
-    size_t num_lost_packets = std::max(0, kNumAllLostPackets - kNackThreshold);
-    for (int n = 0; n < kNumAllLostPackets; ++n) {
-      // The number of lost packets does not change for the first
-      // |kNackThreshold + 1| packets: one is added to the list and one is
-      // removed. Thereafter, the list shrinks every iteration.
-      if (n >= kNackThreshold + 1)
-        num_lost_packets--;
-
-      nack->UpdateLastReceivedPacket(seq_num, timestamp);
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(IsNackListCorrect(
-          nack_list, &sequence_num_lost_packets[index_retransmitted_rtp],
-          num_lost_packets));
-      seq_num++;
-      timestamp += kTimestampIncrement;
-
-      // Retransmission of a lost RTP packet.
-      nack->UpdateLastReceivedPacket(
-          sequence_num_lost_packets[index_retransmitted_rtp],
-          timestamp_retransmitted_rtp);
-      index_retransmitted_rtp++;
-      timestamp_retransmitted_rtp += kTimestampIncrement;
-
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(IsNackListCorrect(
-          nack_list, &sequence_num_lost_packets[index_retransmitted_rtp],
-          num_lost_packets - 1));  // One less lost packet in the list.
-    }
-    ASSERT_TRUE(nack_list.empty());
-  }
-}
-
-// Assess whether the estimation of timestamps and time-to-play is correct.
-// Cover all combinations in which timestamps and sequence numbers might have
-// wrap-around.
-TEST(NackTest, EstimateTimestampAndTimeToPlay) {
-  const uint16_t kLostPackets[] = { 2, 3, 4, 5, 6, 7, 8, 9, 10,
-                                    11, 12, 13, 14, 15 };
-  static const int kNumAllLostPackets = sizeof(kLostPackets) /
-      sizeof(kLostPackets[0]);
-
-  for (int k = 0; k < 4; ++k) {
-    scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-    nack->UpdateSampleRate(kSampleRateHz);
-
-    // Sequence number wrap-around if |k| is 2 or 3.
-    int seq_num_offset = (k < 2) ? 0 : 65531;
-
-    // Timestamp wrap-around if |k| is 1 or 3.
-    uint32_t timestamp_offset = (k & 0x1) ?
-        static_cast<uint32_t>(0xffffffff) - 6 : 0;
-
-    uint32_t timestamp_lost_packets[kNumAllLostPackets];
-    uint16_t seq_num_lost_packets[kNumAllLostPackets];
-    for (int n = 0; n < kNumAllLostPackets; ++n) {
-      timestamp_lost_packets[n] = timestamp_offset + kLostPackets[n] *
-          kTimestampIncrement;
-      seq_num_lost_packets[n] = seq_num_offset + kLostPackets[n];
-    }
-
-    // We want to push two packets before the lost burst starts.
-    uint16_t seq_num = seq_num_lost_packets[0] - 2;
-    uint32_t timestamp = timestamp_lost_packets[0] - 2 * kTimestampIncrement;
-
-    const uint16_t first_seq_num = seq_num;
-    const uint32_t first_timestamp = timestamp;
-
-    // Push two consecutive packets to have a correct estimate of the
-    // timestamp increase.
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-    seq_num++;
-    timestamp += kTimestampIncrement;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-    // A packet after the last one which is supposed to be lost.
-    seq_num = seq_num_lost_packets[kNumAllLostPackets - 1] + 1;
-    timestamp = timestamp_lost_packets[kNumAllLostPackets - 1] +
-        kTimestampIncrement;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-    Nack::NackList nack_list = nack->GetNackList();
-    EXPECT_EQ(static_cast<size_t>(kNumAllLostPackets), nack_list.size());
-
-    // Pretend the first packet is decoded.
-    nack->UpdateLastDecodedPacket(first_seq_num, first_timestamp);
-    nack_list = nack->GetNackList();
-
-    Nack::NackList::iterator it = nack_list.begin();
-    while (it != nack_list.end()) {
-      seq_num = it->first - seq_num_offset;
-      int index = seq_num - kLostPackets[0];
-      EXPECT_EQ(timestamp_lost_packets[index], it->second.estimated_timestamp);
-      EXPECT_EQ((index + 2) * kPacketSizeMs, it->second.time_to_play_ms);
-      ++it;
-    }
-
-    // Pretend 10 ms has passed and we have pulled audio from NetEq; it still
-    // reports the same sequence number as decoded, so time-to-play should be
-    // reduced by 10 ms.
-    nack->UpdateLastDecodedPacket(first_seq_num, first_timestamp);
-    nack_list = nack->GetNackList();
-    it = nack_list.begin();
-    while (it != nack_list.end()) {
-      seq_num = it->first - seq_num_offset;
-      int index = seq_num - kLostPackets[0];
-      EXPECT_EQ((index + 2) * kPacketSizeMs - 10, it->second.time_to_play_ms);
-      ++it;
-    }
-  }
-}
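The time-to-play figures asserted above follow from the constants at the top of the file: each packet carries kTimestampIncrement = 480 samples at kSampleRateHz = 16000, i.e. 30 ms of audio. A minimal sketch of the computation the removed TimeToPlay() presumably performs (the two-argument form here is for illustration only; the real method keeps the last decoded timestamp as internal state):

#include <stdint.h>

int TimeToPlayMs(uint32_t packet_timestamp, uint32_t last_decoded_timestamp,
                 int sample_rate_khz) {
  // Unsigned subtraction makes timestamp wrap-around harmless; dividing the
  // samples still ahead of the playout head by kHz yields milliseconds.
  uint32_t samples_ahead = packet_timestamp - last_decoded_timestamp;
  return static_cast<int>(samples_ahead) / sample_rate_khz;
}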
-
-TEST(NackTest, MissingPacketsPriorToLastDecodedRtpShouldNotBeInNackList) {
-  for (int m = 0; m < 2; ++m) {
-    uint16_t seq_num_offset = (m == 0) ? 0 : 65531;  // Wrap-around if |m| is 1.
-    scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-    nack->UpdateSampleRate(kSampleRateHz);
-
-    // Push two consecutive packets to have a correct estimate of the
-    // timestamp increase.
-    uint16_t seq_num = 0;
-    nack->UpdateLastReceivedPacket(seq_num_offset + seq_num,
-                                   seq_num * kTimestampIncrement);
-    seq_num++;
-    nack->UpdateLastReceivedPacket(seq_num_offset + seq_num,
-                                   seq_num * kTimestampIncrement);
-
-    // Skip 10 packets (larger than the NACK threshold).
-    const int kNumLostPackets = 10;
-    seq_num += kNumLostPackets + 1;
-    nack->UpdateLastReceivedPacket(seq_num_offset + seq_num,
-                                   seq_num * kTimestampIncrement);
-
-    const size_t kExpectedListSize = kNumLostPackets - kNackThreshold;
-    std::vector<uint16_t> nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_EQ(kExpectedListSize, nack_list.size());
-
-    for (int k = 0; k < 2; ++k) {
-      // Decoding of the first and the second arrived packets.
-      for (int n = 0; n < kPacketSizeMs / 10; ++n) {
-        nack->UpdateLastDecodedPacket(seq_num_offset + k,
-                                      k * kTimestampIncrement);
-        nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-        EXPECT_EQ(kExpectedListSize, nack_list.size());
-      }
-    }
-
-    // Decoding of the last received packet.
-    nack->UpdateLastDecodedPacket(seq_num + seq_num_offset,
-                                  seq_num * kTimestampIncrement);
-    nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_TRUE(nack_list.empty());
-
-    // Make sure the list of late packets is also empty. To check that, push a
-    // few packets; if the late list is not empty, its contents will pop up in
-    // the NACK list.
-    for (int n = 0; n < kNackThreshold + 10; ++n) {
-      seq_num++;
-      nack->UpdateLastReceivedPacket(seq_num_offset + seq_num,
-                                     seq_num * kTimestampIncrement);
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(nack_list.empty());
-    }
-  }
-}
-
-TEST(NackTest, Reset) {
-  scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-  nack->UpdateSampleRate(kSampleRateHz);
-
-  // Push two consecutive packets to have a correct estimate of the
-  // timestamp increase.
-  uint16_t seq_num = 0;
-  nack->UpdateLastReceivedPacket(seq_num, seq_num * kTimestampIncrement);
-  seq_num++;
-  nack->UpdateLastReceivedPacket(seq_num, seq_num * kTimestampIncrement);
-
-  // Skip 10 packets (larger than the NACK threshold).
-  const int kNumLostPackets = 10;
-  seq_num += kNumLostPackets + 1;
-  nack->UpdateLastReceivedPacket(seq_num, seq_num * kTimestampIncrement);
-
-  const size_t kExpectedListSize = kNumLostPackets - kNackThreshold;
-  std::vector<uint16_t> nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-  EXPECT_EQ(kExpectedListSize, nack_list.size());
-
-  nack->Reset();
-  nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-  EXPECT_TRUE(nack_list.empty());
-}
-
-TEST(NackTest, ListSizeAppliedFromBeginning) {
-  const size_t kNackListSize = 10;
-  for (int m = 0; m < 2; ++m) {
-    uint16_t seq_num_offset = (m == 0) ? 0 : 65525;  // Wrap-around if |m| is 1.
-    scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-    nack->UpdateSampleRate(kSampleRateHz);
-    nack->SetMaxNackListSize(kNackListSize);
-
-    uint16_t seq_num = seq_num_offset;
-    uint32_t timestamp = 0x12345678;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-    // More packets lost than the NACK-list size limit.
-    uint16_t num_lost_packets = kNackThreshold + kNackListSize + 5;
-
-    seq_num += num_lost_packets + 1;
-    timestamp += (num_lost_packets + 1) * kTimestampIncrement;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-    std::vector<uint16_t> nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_EQ(kNackListSize - kNackThreshold, nack_list.size());
-  }
-}
-
-TEST(NackTest, ChangeOfListSizeAppliedAndOldElementsRemoved) {
-  const size_t kNackListSize = 10;
-  for (int m = 0; m < 2; ++m) {
-    uint16_t seq_num_offset = (m == 0) ? 0 : 65525;  // Wrap-around if |m| is 1.
-    scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-    nack->UpdateSampleRate(kSampleRateHz);
-
-    uint16_t seq_num = seq_num_offset;
-    uint32_t timestamp = 0x87654321;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-    // More packets lost than the NACK-list size limit.
-    uint16_t num_lost_packets = kNackThreshold + kNackListSize + 5;
-
-    scoped_array<uint16_t> seq_num_lost(new uint16_t[num_lost_packets]);
-    for (int n = 0; n < num_lost_packets; ++n) {
-      seq_num_lost[n] = ++seq_num;
-    }
-
-    ++seq_num;
-    timestamp += (num_lost_packets + 1) * kTimestampIncrement;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-    size_t expected_size = num_lost_packets - kNackThreshold;
-
-    std::vector<uint16_t> nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_EQ(expected_size, nack_list.size());
-
-    nack->SetMaxNackListSize(kNackListSize);
-    expected_size = kNackListSize - kNackThreshold;
-    nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_TRUE(IsNackListCorrect(
-        nack_list, &seq_num_lost[num_lost_packets - kNackListSize],
-        expected_size));
-
-    // The NACK list does not change size, but its content changes: the oldest
-    // element is removed and one from the late list is inserted.
-    size_t n;
-    for (n = 1; n <= static_cast<size_t>(kNackThreshold); ++n) {
-      ++seq_num;
-      timestamp += kTimestampIncrement;
-      nack->UpdateLastReceivedPacket(seq_num, timestamp);
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(IsNackListCorrect(
-          nack_list, &seq_num_lost[num_lost_packets - kNackListSize + n],
-          expected_size));
-    }
-
-    // The NACK list should shrink.
-    for (; n < kNackListSize; ++n) {
-      ++seq_num;
-      timestamp += kTimestampIncrement;
-      nack->UpdateLastReceivedPacket(seq_num, timestamp);
-      --expected_size;
-      nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-      EXPECT_TRUE(IsNackListCorrect(
-          nack_list, &seq_num_lost[num_lost_packets - kNackListSize + n],
-          expected_size));
-    }
-
-    // After this packet, the NACK list should be empty.
-    ++seq_num;
-    timestamp += kTimestampIncrement;
-    nack->UpdateLastReceivedPacket(seq_num, timestamp);
-    nack_list = nack->GetNackList(kShortRoundTripTimeMs);
-    EXPECT_TRUE(nack_list.empty());
-  }
-}
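The RoudTripTimeIsApplied test below expects only sequence numbers 4 and 5 from GetNackList(100): a packet whose time-to-play (20, 50 or 80 ms here) does not exceed the round-trip time is omitted, because a retransmission could not arrive before its playout deadline. A sketch of the assumed filtering step (illustrative only; the field names are those of the removed nack.h):

#include <vector>

std::vector<uint16_t> FilterByRoundTripTime(const Nack::NackList& nack_list,
                                            int round_trip_time_ms) {
  std::vector<uint16_t> sequence_numbers;
  for (Nack::NackList::const_iterator it = nack_list.begin();
       it != nack_list.end(); ++it) {
    // Only request packets whose retransmission can still make it in time.
    if (it->second.is_missing &&
        it->second.time_to_play_ms > round_trip_time_ms)
      sequence_numbers.push_back(it->first);
  }
  return sequence_numbers;
}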
-
-TEST(NackTest, RoudTripTimeIsApplied) {
-  const int kNackListSize = 200;
-  scoped_ptr<Nack> nack(Nack::Create(kNackThreshold));
-  nack->UpdateSampleRate(kSampleRateHz);
-  nack->SetMaxNackListSize(kNackListSize);
-
-  uint16_t seq_num = 0;
-  uint32_t timestamp = 0x87654321;
-  nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-  // Lose more packets than the NACK threshold.
-  uint16_t kNumLostPackets = kNackThreshold + 5;
-
-  seq_num += (1 + kNumLostPackets);
-  timestamp += (1 + kNumLostPackets) * kTimestampIncrement;
-  nack->UpdateLastReceivedPacket(seq_num, timestamp);
-
-  // Expected time-to-play values are:
-  // kPacketSizeMs - 10, 2*kPacketSizeMs - 10, 3*kPacketSizeMs - 10, ...
-  //
-  // sequence number:  1,  2,  3,   4,   5
-  // time-to-play:    20, 50, 80, 110, 140
-  //
-  std::vector<uint16_t> nack_list = nack->GetNackList(100);
-  ASSERT_EQ(2u, nack_list.size());
-  EXPECT_EQ(4, nack_list[0]);
-  EXPECT_EQ(5, nack_list[1]);
-}
-
-}  // namespace acm1
-
-}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.cc
index 09b10abfa825..dbbdade80301 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.cc
@@ -11,4 +11,3 @@
 #include "ACMTest.h"
 
 ACMTest::~ACMTest() {}
-
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.h
index 7bd3c6cf5002..767add1715ba 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/ACMTest.h
@@ -8,13 +8,14 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef ACMTEST_H
-#define ACMTEST_H
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ACMTEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ACMTEST_H_
 
 class ACMTest {
  public:
+  ACMTest() {}
   virtual ~ACMTest() = 0;
   virtual void Perform() = 0;
 };
 
-#endif
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ACMTEST_H_
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.cc
index a9e2e710deb0..15bac6adc093 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.cc
@@ -20,6 +20,7 @@
 #include
 
 #include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
 #include "webrtc/common_types.h"
 #include "webrtc/engine_configurations.h"
 #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
@@ -54,9 +55,9 @@ void APITest::Wait(uint32_t waitLengthMs) {
   }
 }
 
-APITest::APITest()
-    : _acmA(AudioCodingModule::Create(1)),
-      _acmB(AudioCodingModule::Create(2)),
+APITest::APITest(const Config& config)
+    : _acmA(config.Get().Create(1)),
+      _acmB(config.Get().Create(2)),
       _channel_A2B(NULL),
       _channel_B2A(NULL),
       _writeToFile(true),
@@ -238,12 +239,12 @@ int16_t APITest::SetUp() {
   //--- Set A-to-B channel
   _channel_A2B = new Channel(2);
   CHECK_ERROR_MT(_acmA->RegisterTransportCallback(_channel_A2B));
-  _channel_A2B->RegisterReceiverACM(_acmB);
+  _channel_A2B->RegisterReceiverACM(_acmB.get());
 
   //--- Set B-to-A channel
   _channel_B2A = new Channel(1);
   CHECK_ERROR_MT(_acmB->RegisterTransportCallback(_channel_B2A));
-  _channel_B2A->RegisterReceiverACM(_acmA);
+  _channel_B2A->RegisterReceiverACM(_acmA.get());
 
   //--- EVENT TIMERS
   // A
@@ -729,11 +730,11 @@ void APITest::TestDelay(char side) {
   estimDelayCB.SetArithMean(true);
 
   if (side == 'A') {
-    myACM = _acmA;
+    myACM = _acmA.get();
     myChannel = _channel_B2A;
     myMinDelay = &_minDelayA;
   } else {
-    myACM = _acmB;
+    myACM = _acmB.get();
     myChannel = _channel_A2B;
     myMinDelay = &_minDelayB;
  }
@@ -845,14 +846,14 @@ void APITest::TestRegisteration(char sendSide) {
 
   switch (sendSide) {
     case 'A': {
-      sendACM = _acmA;
-      receiveACM = _acmB;
+      sendACM = _acmA.get();
+      receiveACM = _acmB.get();
       thereIsDecoder = &_thereIsDecoderB;
       break;
     }
     case 'B': {
-      sendACM = _acmB;
-      receiveACM = _acmA;
+      sendACM = _acmB.get();
+      receiveACM = _acmA.get();
       thereIsDecoder = &_thereIsDecoderA;
       break;
     }
@@ -964,17
+965,17 @@ void APITest::TestPlayout(char receiveSide) { AudioPlayoutMode* playoutMode = NULL; switch (receiveSide) { case 'A': { - receiveACM = _acmA; + receiveACM = _acmA.get(); playoutMode = &_playoutModeA; break; } case 'B': { - receiveACM = _acmB; + receiveACM = _acmB.get(); playoutMode = &_playoutModeB; break; } default: - receiveACM = _acmA; + receiveACM = _acmA.get(); } int32_t receiveFreqHz = receiveACM->ReceiveFrequency(); @@ -1018,7 +1019,6 @@ void APITest::TestPlayout(char receiveSide) { } } -// set/get receiver VAD status & mode. void APITest::TestSendVAD(char side) { if (_randomTest) { return; @@ -1044,14 +1044,14 @@ void APITest::TestSendVAD(char side) { dtx = &_sendDTXA; mode = &_sendVADModeA; myChannel = _channel_A2B; - myACM = _acmA; + myACM = _acmA.get(); } else { AudioCodingModule::Codec(_codecCntrB, &myCodec); vad = &_sendVADB; dtx = &_sendDTXB; mode = &_sendVADModeB; myChannel = _channel_B2A; - myACM = _acmB; + myACM = _acmB.get(); } CheckVADStatus(side); @@ -1137,7 +1137,7 @@ void APITest::ChangeCodec(char side) { fprintf(stdout, "Reset Encoder Side A \n"); } if (side == 'A') { - myACM = _acmA; + myACM = _acmA.get(); codecCntr = &_codecCntrA; { WriteLockScoped wl(_apiTestRWLock); @@ -1148,7 +1148,7 @@ void APITest::ChangeCodec(char side) { mode = &_sendVADModeA; myChannel = _channel_A2B; } else { - myACM = _acmB; + myACM = _acmB.get(); codecCntr = &_codecCntrB; { WriteLockScoped wl(_apiTestRWLock); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.h index f9e9a9144c5a..3b2d4afba6ff 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/APITest.h @@ -8,18 +8,22 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef API_TEST_H -#define API_TEST_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_APITEST_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_APITEST_H_ +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" #include "webrtc/modules/audio_coding/main/test/ACMTest.h" #include "webrtc/modules/audio_coding/main/test/Channel.h" #include "webrtc/modules/audio_coding/main/test/PCMFile.h" #include "webrtc/modules/audio_coding/main/test/utility.h" #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/rw_lock_wrapper.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" namespace webrtc { +class Config; + enum APITESTAction { TEST_CHANGE_CODEC_ONLY = 0, DTX_TEST = 1 @@ -27,7 +31,7 @@ enum APITESTAction { class APITest : public ACMTest { public: - APITest(); + explicit APITest(const Config& config); ~APITest(); void Perform(); @@ -78,8 +82,8 @@ class APITest : public ACMTest { bool APIRunB(); //--- ACMs - AudioCodingModule* _acmA; - AudioCodingModule* _acmB; + scoped_ptr _acmA; + scoped_ptr _acmB; //--- Channels Channel* _channel_A2B; @@ -160,4 +164,4 @@ class APITest : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_APITEST_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Channel.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Channel.h index e53988bf8765..27b2cfb6fbf5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Channel.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Channel.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef CHANNEL_H -#define CHANNEL_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_CHANNEL_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_CHANNEL_H_ #include @@ -121,4 +121,4 @@ class Channel : public AudioPacketizationCallback { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_CHANNEL_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc index 1ee6abc30ca6..cdf9fdcaeb4c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc @@ -19,6 +19,7 @@ #include "testing/gtest/include/gtest/gtest.h" #include "webrtc/common_types.h" +#include "webrtc/common.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" #include "webrtc/modules/audio_coding/main/test/utility.h" @@ -241,14 +242,16 @@ void Receiver::Run() { } } -EncodeDecodeTest::EncodeDecodeTest() { +EncodeDecodeTest::EncodeDecodeTest(const Config& config) + : config_(config) { _testMode = 2; Trace::CreateTrace(); Trace::SetTraceFile( (webrtc::test::OutputPath() + "acm_encdec_trace.txt").c_str()); } -EncodeDecodeTest::EncodeDecodeTest(int testMode) { +EncodeDecodeTest::EncodeDecodeTest(int testMode, const Config& config) + : config_(config) { //testMode == 0 for autotest //testMode == 1 for testing all codecs/parameters //testMode > 1 for specific user-input test (as it was used before) @@ -270,7 +273,8 @@ void EncodeDecodeTest::Perform() { codePars[1] = 0; codePars[2] = 0; - scoped_ptr acm(AudioCodingModule::Create(0)); + scoped_ptr acm( + config_.Get().Create(0)); struct CodecInst sendCodecTmp; numCodecs = acm->NumberOfCodecs(); @@ -325,7 +329,8 @@ void EncodeDecodeTest::Perform() { void EncodeDecodeTest::EncodeToFile(int fileType, int codeId, int* codePars, int testMode) { - scoped_ptr acm(AudioCodingModule::Create(1)); + scoped_ptr acm( + config_.Get().Create(1)); RTPFile rtpFile; std::string fileName = webrtc::test::OutputPath() + "outFile.rtp"; rtpFile.Open(fileName.c_str(), "wb+"); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h index 548f172f8eda..5aa359636ce1 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h @@ -13,16 +13,18 @@ #include -#include "ACMTest.h" -#include "audio_coding_module.h" -#include "RTPFile.h" -#include "PCMFile.h" -#include "typedefs.h" +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" +#include "webrtc/modules/audio_coding/main/test/RTPFile.h" +#include "webrtc/typedefs.h" namespace webrtc { #define MAX_INCOMING_PAYLOAD 8096 +class Config; + // TestPacketization callback which writes the encoded payloads to file class TestPacketization : public AudioPacketizationCallback { public: @@ -90,8 +92,8 @@ class Receiver { class EncodeDecodeTest : public ACMTest { public: - EncodeDecodeTest(); - EncodeDecodeTest(int testMode); + explicit EncodeDecodeTest(const Config& config); + EncodeDecodeTest(int testMode, const Config& config); virtual void Perform(); uint16_t _playoutFreq; @@ 
-100,6 +102,8 @@ class EncodeDecodeTest : public ACMTest { private: void EncodeToFile(int fileType, int codeId, int* codePars, int testMode); + const Config& config_; + protected: Sender _sender; Receiver _receiver; @@ -107,4 +111,4 @@ class EncodeDecodeTest : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/PCMFile.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/PCMFile.h index 568b30472b7c..c4487b813321 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/PCMFile.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/PCMFile.h @@ -16,8 +16,8 @@ #include -#include "module_common_types.h" -#include "typedefs.h" +#include "webrtc/modules/interface/module_common_types.h" +#include "webrtc/typedefs.h" namespace webrtc { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/RTPFile.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/RTPFile.h index 7b146b392680..9b6d5fcafefc 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/RTPFile.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/RTPFile.h @@ -8,16 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTPFILE_H -#define RTPFILE_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_RTPFILE_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_RTPFILE_H_ -#include "audio_coding_module.h" -#include "module_common_types.h" -#include "typedefs.h" -#include "rw_lock_wrapper.h" #include #include +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/interface/module_common_types.h" +#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h" +#include "webrtc/typedefs.h" + namespace webrtc { class RTPStream { @@ -113,4 +114,5 @@ class RTPFile : public RTPStream { }; } // namespace webrtc -#endif + +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_RTPFILE_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/SpatialAudio.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/SpatialAudio.h index fd9c0e7a64b5..907d690b3dc5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/SpatialAudio.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/SpatialAudio.h @@ -8,15 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef ACM_TEST_SPATIAL_AUDIO_H -#define ACM_TEST_SPATIAL_AUDIO_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_SPATIALAUDIO_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_SPATIALAUDIO_H_ +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" +#include "webrtc/modules/audio_coding/main/test/utility.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" -#include "audio_coding_module.h" -#include "utility.h" #define MAX_FILE_NAME_LENGTH_BYTE 500 @@ -44,4 +44,4 @@ class SpatialAudio : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_SPATIALAUDIO_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc index d6c6dc4e6d87..fba7f03297f4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc @@ -99,9 +99,9 @@ void TestPack::reset_payload_size() { payload_size_ = 0; } -TestAllCodecs::TestAllCodecs(int test_mode) - : acm_a_(AudioCodingModule::Create(0)), - acm_b_(AudioCodingModule::Create(1)), +TestAllCodecs::TestAllCodecs(int test_mode, const Config& config) + : acm_a_(config.Get().Create(0)), + acm_b_(config.Get().Create(1)), channel_a_to_b_(NULL), test_count_(0), packet_size_samples_(0), diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.h index 5aabcf7f77c0..0231d84c683a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestAllCodecs.h @@ -8,17 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_ALL_CODECS_H_ -#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_ALL_CODECS_H_ +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTALLCODECS_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTALLCODECS_H_ +#include "webrtc/common.h" +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" -#include "typedefs.h" +#include "webrtc/typedefs.h" namespace webrtc { +class Config; + class TestPack : public AudioPacketizationCallback { public: TestPack(); @@ -47,7 +50,7 @@ class TestPack : public AudioPacketizationCallback { class TestAllCodecs : public ACMTest { public: - TestAllCodecs(int test_mode); + TestAllCodecs(int test_mode, const Config& config); ~TestAllCodecs(); void Perform(); @@ -77,4 +80,4 @@ class TestAllCodecs : public ACMTest { } // namespace webrtc -#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_ALL_CODECS_H_ +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTALLCODECS_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.cc index cbb3647e822f..032579cf060b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.cc @@ -8,24 +8,24 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "TestFEC.h" +#include "webrtc/modules/audio_coding/main/test/TestFEC.h" #include - #include -#include "audio_coding_module_typedefs.h" -#include "common_types.h" -#include "engine_configurations.h" -#include "trace.h" -#include "utility.h" +#include "webrtc/common.h" +#include "webrtc/common_types.h" +#include "webrtc/engine_configurations.h" +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" +#include "webrtc/modules/audio_coding/main/test/utility.h" +#include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/test/testsupport/fileutils.h" namespace webrtc { -TestFEC::TestFEC() - : _acmA(AudioCodingModule::Create(0)), - _acmB(AudioCodingModule::Create(1)), +TestFEC::TestFEC(const Config& config) + : _acmA(config.Get().Create(0)), + _acmB(config.Get().Create(1)), _channelA2B(NULL), _testCntr(0) { } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.h index 94391121d872..af3cdd7dce30 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestFEC.h @@ -8,19 +8,21 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef TEST_FEC_H -#define TEST_FEC_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TESTFEC_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TESTFEC_H_ +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" namespace webrtc { +class Config; + class TestFEC : public ACMTest { public: - TestFEC(); + explicit TestFEC(const Config& config); ~TestFEC(); void Perform(); @@ -45,4 +47,4 @@ class TestFEC : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TESTFEC_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.cc index 65c9983fb97d..b26334c32983 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.cc @@ -15,7 +15,7 @@ #include #include "gtest/gtest.h" - +#include "webrtc/common.h" #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" @@ -108,9 +108,9 @@ void TestPackStereo::set_lost_packet(bool lost) { lost_packet_ = lost; } -TestStereo::TestStereo(int test_mode) - : acm_a_(AudioCodingModule::Create(0)), - acm_b_(AudioCodingModule::Create(1)), +TestStereo::TestStereo(int test_mode, const Config& config) + : acm_a_(config.Get().Create(0)), + acm_b_(config.Get().Create(1)), channel_a2b_(NULL), test_cntr_(0), pack_size_samp_(0), diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.h index 53e4f28c1948..88320a0e5a85 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestStereo.h @@ -8,18 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_STEREO_H_ -#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_STEREO_H_ +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTSTEREO_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTSTEREO_H_ #include #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" namespace webrtc { +class Config; + enum StereoMonoMode { kNotSet, kMono, @@ -60,7 +62,7 @@ class TestPackStereo : public AudioPacketizationCallback { class TestStereo : public ACMTest { public: - TestStereo(int test_mode); + TestStereo(int test_mode, const Config& config); ~TestStereo(); void Perform(); @@ -114,4 +116,4 @@ class TestStereo : public ACMTest { } // namespace webrtc -#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_STEREO_H_ +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTSTEREO_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.cc index 29c9ade80f79..22e9696ff567 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.cc @@ -12,19 +12,20 @@ #include +#include "webrtc/common.h" #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" +#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" #include "webrtc/modules/audio_coding/main/test/utility.h" -#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" -#include "webrtc/test/testsupport/fileutils.h" #include "webrtc/system_wrappers/interface/trace.h" +#include "webrtc/test/testsupport/fileutils.h" namespace webrtc { -TestVADDTX::TestVADDTX() - : _acmA(AudioCodingModule::Create(0)), - _acmB(AudioCodingModule::Create(1)), +TestVADDTX::TestVADDTX(const Config& config) + : _acmA(config.Get().Create(0)), + _acmB(config.Get().Create(1)), _channelA2B(NULL) { } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.h index d55bdee5d3ac..e0aa6b813ab6 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TestVADDTX.h @@ -8,16 +8,18 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef TEST_VAD_DTX_H -#define TEST_VAD_DTX_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTVADDTX_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTVADDTX_H_ +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" namespace webrtc { +class Config; + typedef struct { bool statusDTX; bool statusVAD; @@ -47,7 +49,7 @@ class ActivityMonitor : public ACMVADCallback { class TestVADDTX : public ACMTest { public: - TestVADDTX(); + explicit TestVADDTX(const Config& config); ~TestVADDTX(); void Perform(); @@ -82,4 +84,4 @@ class TestVADDTX : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTVADDTX_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Tester.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Tester.cc index 72284ffa3e33..31f7317fc732 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Tester.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/Tester.cc @@ -13,6 +13,7 @@ #include #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/common.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" #include "webrtc/modules/audio_coding/main/test/APITest.h" #include "webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h" @@ -23,11 +24,11 @@ #include "webrtc/modules/audio_coding/main/test/TestStereo.h" #include "webrtc/modules/audio_coding/main/test/TestVADDTX.h" #include "webrtc/modules/audio_coding/main/test/TwoWayCommunication.h" +#include "webrtc/modules/audio_coding/main/test/utility.h" #include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/test/testsupport/fileutils.h" #include "webrtc/test/testsupport/gtest_disable.h" -using webrtc::AudioCodingModule; using webrtc::Trace; // This parameter is used to describe how to run the tests. 
@@ -38,7 +39,14 @@ TEST(AudioCodingModuleTest, TestAllCodecs) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_allcodecs_trace.txt").c_str());
-  webrtc::TestAllCodecs(ACM_TEST_MODE).Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::TestAllCodecs(ACM_TEST_MODE, config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::TestAllCodecs(ACM_TEST_MODE, config).Perform();
+
   Trace::ReturnTrace();
 }
 
@@ -46,7 +54,14 @@ TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestEncodeDecode)) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_encodedecode_trace.txt").c_str());
-  webrtc::EncodeDecodeTest(ACM_TEST_MODE).Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::EncodeDecodeTest(ACM_TEST_MODE, config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::EncodeDecodeTest(ACM_TEST_MODE, config).Perform();
+
   Trace::ReturnTrace();
 }
 
@@ -54,7 +69,14 @@ TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestFEC)) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_fec_trace.txt").c_str());
-  webrtc::TestFEC().Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::TestFEC(config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::TestFEC(config).Perform();
+
   Trace::ReturnTrace();
 }
 
@@ -62,7 +84,14 @@ TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestIsac)) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_isac_trace.txt").c_str());
-  webrtc::ISACTest(ACM_TEST_MODE).Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::ISACTest(ACM_TEST_MODE, config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::ISACTest(ACM_TEST_MODE, config).Perform();
+
   Trace::ReturnTrace();
 }
 
@@ -70,7 +99,14 @@ TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TwoWayCommunication)) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_twowaycom_trace.txt").c_str());
-  webrtc::TwoWayCommunication(ACM_TEST_MODE).Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::TwoWayCommunication(ACM_TEST_MODE, config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::TwoWayCommunication(ACM_TEST_MODE, config).Perform();
+
   Trace::ReturnTrace();
 }
 
@@ -78,7 +114,14 @@ TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestStereo)) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_stereo_trace.txt").c_str());
-  webrtc::TestStereo(ACM_TEST_MODE).Perform();
+
+  webrtc::Config config;
+  UseLegacyAcm(&config);
+
+  webrtc::TestStereo(ACM_TEST_MODE, config).Perform();
+  UseNewAcm(&config);
+
+  webrtc::TestStereo(ACM_TEST_MODE, config).Perform();
   Trace::ReturnTrace();
 }
 
@@ -86,7 +129,14 @@ TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestVADDTX)) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_vaddtx_trace.txt").c_str());
-  webrtc::TestVADDTX().Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::TestVADDTX(config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::TestVADDTX(config).Perform();
+
   Trace::ReturnTrace();
 }
 
@@ -94,7 +144,14 @@ TEST(AudioCodingModuleTest, TestOpus) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_opus_trace.txt").c_str());
-  webrtc::OpusTest().Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::OpusTest(config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::OpusTest(config).Perform();
+
   Trace::ReturnTrace();
 }
 
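The recurring pattern above — one webrtc::Config toggled between UseLegacyAcm() and UseNewAcm() before each Perform() — builds on webrtc::Config from webrtc/common.h, which stores at most one object per option type: Get<T>() returns a default-constructed T until Set<T>() replaces it and takes ownership. A sketch of how the switch presumably works (the factory type names are assumptions for illustration, not confirmed by this patch):

// Hypothetical option type stored in the Config.
struct AudioCodingModuleFactory {
  virtual ~AudioCodingModuleFactory() {}
  virtual AudioCodingModule* Create(int id) const;  // Legacy ACM by default.
};

struct NewAudioCodingModuleFactory : AudioCodingModuleFactory {
  virtual AudioCodingModule* Create(int id) const;  // ACM2-based module.
};

void UseNewAcm(webrtc::Config* config) {
  // The Config takes ownership; tests retrieve the current factory through
  // config->Get<AudioCodingModuleFactory>() and call Create(id) on it.
  config->Set<AudioCodingModuleFactory>(new NewAudioCodingModuleFactory());
}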
@@ -105,7 +162,14 @@ TEST(AudioCodingModuleTest, TestOpus) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_apitest_trace.txt").c_str());
-  webrtc::APITest().Perform();
+  webrtc::Config config;
+
+  UseLegacyAcm(&config);
+  webrtc::APITest(config).Perform();
+
+  UseNewAcm(&config);
+  webrtc::APITest(config).Perform();
+
   Trace::ReturnTrace();
 }
 #endif
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
index 1b74a956a890..fb3d6f48753e 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
@@ -18,25 +18,25 @@
 #include
 #endif
 
-#include "common_types.h"
-#include "engine_configurations.h"
 #include "gtest/gtest.h"
-#include "PCMFile.h"
-#include "trace.h"
-#include "utility.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/common.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/main/test/utility.h"
+#include "webrtc/system_wrappers/interface/trace.h"
 #include "webrtc/test/testsupport/fileutils.h"
 
 namespace webrtc {
 
 #define MAX_FILE_NAME_LENGTH_BYTE 500
 
-TwoWayCommunication::TwoWayCommunication(int testMode)
-    : _acmA(AudioCodingModule::Create(1)),
-      _acmB(AudioCodingModule::Create(2)),
-      _acmRefA(AudioCodingModule::Create(3)),
-      _acmRefB(AudioCodingModule::Create(4)),
-      _testMode(testMode) {
-}
+TwoWayCommunication::TwoWayCommunication(int testMode, const Config& config)
+    : _acmA(config.Get().Create(1)),
+      _acmB(config.Get().Create(2)),
+      _acmRefA(config.Get().Create(3)),
+      _acmRefB(config.Get().Create(4)),
+      _testMode(testMode) { }
 
 TwoWayCommunication::~TwoWayCommunication() {
   delete _channel_A2B;
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
index fe0ed2a24bbb..0d1e514dafec 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
@@ -8,21 +8,23 @@
  * be found in the AUTHORS file in the root of the source tree.
*/ -#ifndef TWO_WAY_COMMUNICATION_H -#define TWO_WAY_COMMUNICATION_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TWOWAYCOMMUNICATION_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TWOWAYCOMMUNICATION_H_ +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" +#include "webrtc/modules/audio_coding/main/test/utility.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" -#include "audio_coding_module.h" -#include "utility.h" namespace webrtc { +class Config; + class TwoWayCommunication : public ACMTest { public: - TwoWayCommunication(int testMode = 1); + TwoWayCommunication(int testMode, const Config& config); ~TwoWayCommunication(); void Perform(); @@ -57,4 +59,4 @@ class TwoWayCommunication : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TWOWAYCOMMUNICATION_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/delay_test.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/delay_test.cc index 1a0f8f829f30..63bfe2be4b24 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/delay_test.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/delay_test.cc @@ -8,8 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" - #include #include @@ -17,8 +15,10 @@ #include "gflags/gflags.h" #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/common.h" #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" #include "webrtc/modules/audio_coding/main/test/Channel.h" @@ -35,68 +35,76 @@ DEFINE_string(input_file, "", "Input file, PCM16 32 kHz, optional."); DEFINE_int32(delay, 0, "Delay in millisecond."); DEFINE_int32(init_delay, 0, "Initial delay in millisecond."); DEFINE_bool(dtx, false, "Enable DTX at the sender side."); +DEFINE_bool(acm2, false, "Run the test with ACM2."); +DEFINE_bool(packet_loss, false, "Apply packet loss, c.f. 
Channel{.cc, .h}."); +DEFINE_bool(fec, false, "Use Forward Error Correction (FEC)."); namespace webrtc { + namespace { -struct CodecConfig { +struct CodecSettings { char name[50]; int sample_rate_hz; int num_channels; }; -struct AcmConfig { +struct AcmSettings { bool dtx; bool fec; }; -struct Config { - CodecConfig codec; - AcmConfig acm; +struct TestSettings { + CodecSettings codec; + AcmSettings acm; bool packet_loss; }; +} // namespace class DelayTest { public: - - DelayTest() - : acm_a_(AudioCodingModule::Create(0)), - acm_b_(AudioCodingModule::Create(1)), - channel_a2b_(NULL), + explicit DelayTest(const Config& config) + : acm_a_(config.Get().Create(0)), + acm_b_(config.Get().Create(1)), + channel_a2b_(new Channel), test_cntr_(0), encoding_sample_rate_hz_(8000) {} - ~DelayTest() {} - - void TearDown() { + ~DelayTest() { if (channel_a2b_ != NULL) { delete channel_a2b_; channel_a2b_ = NULL; } + in_file_a_.Close(); } - void SetUp() { + void Initialize() { test_cntr_ = 0; std::string file_name = webrtc::test::ResourcePath( "audio_coding/testfile32kHz", "pcm"); if (FLAGS_input_file.size() > 0) file_name = FLAGS_input_file; in_file_a_.Open(file_name, 32000, "rb"); - acm_a_->InitializeReceiver(); - acm_b_->InitializeReceiver(); + ASSERT_EQ(0, acm_a_->InitializeReceiver()) << + "Couldn't initialize receiver.\n"; + ASSERT_EQ(0, acm_b_->InitializeReceiver()) << + "Couldn't initialize receiver.\n"; if (FLAGS_init_delay > 0) { - ASSERT_EQ(0, acm_b_->SetInitialPlayoutDelay(FLAGS_init_delay)); + ASSERT_EQ(0, acm_b_->SetInitialPlayoutDelay(FLAGS_init_delay)) << + "Failed to set initial delay.\n"; } if (FLAGS_delay > 0) { - ASSERT_EQ(0, acm_b_->SetMinimumPlayoutDelay(FLAGS_delay)); + ASSERT_EQ(0, acm_b_->SetMinimumPlayoutDelay(FLAGS_delay)) << + "Failed to set minimum delay.\n"; } - uint8_t num_encoders = acm_a_->NumberOfCodecs(); + int num_encoders = acm_a_->NumberOfCodecs(); CodecInst my_codec_param; for (int n = 0; n < num_encoders; n++) { - acm_b_->Codec(n, &my_codec_param); + EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param)) << + "Failed to get codec."; if (STR_CASE_CMP(my_codec_param.plname, "opus") == 0) my_codec_param.channels = 1; else if (my_codec_param.channels > 1) @@ -106,16 +114,17 @@ class DelayTest { continue; if (STR_CASE_CMP(my_codec_param.plname, "telephone-event") == 0) continue; - acm_b_->RegisterReceiveCodec(my_codec_param); + ASSERT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param)) << + "Couldn't register receive codec.\n"; } // Create and connect the channel - channel_a2b_ = new Channel; - acm_a_->RegisterTransportCallback(channel_a2b_); + ASSERT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_)) << + "Couldn't register Transport callback.\n"; channel_a2b_->RegisterReceiverACM(acm_b_.get()); } - void Perform(const Config* config, size_t num_tests, int duration_sec, + void Perform(const TestSettings* config, size_t num_tests, int duration_sec, const char* output_prefix) { for (size_t n = 0; n < num_tests; ++n) { ApplyConfig(config[n]); @@ -124,8 +133,7 @@ class DelayTest { } private: - - void ApplyConfig(const Config& config) { + void ApplyConfig(const TestSettings& config) { printf("====================================\n"); printf("Test %d \n" "Codec: %s, %d kHz, %d channel(s)\n" @@ -140,19 +148,22 @@ class DelayTest { ConfigChannel(config.packet_loss); } - void SendCodec(const CodecConfig& config) { + void SendCodec(const CodecSettings& config) { CodecInst my_codec_param; - ASSERT_EQ( - 0, - AudioCodingModule::Codec(config.name, &my_codec_param, - config.sample_rate_hz, 
config.num_channels));
+    ASSERT_EQ(0, AudioCodingModule::Codec(
+        config.name, &my_codec_param, config.sample_rate_hz,
+        config.num_channels)) << "Specified codec is not supported.\n";
+
     encoding_sample_rate_hz_ = my_codec_param.plfreq;
-    ASSERT_EQ(0, acm_a_->RegisterSendCodec(my_codec_param));
+    ASSERT_EQ(0, acm_a_->RegisterSendCodec(my_codec_param)) <<
+        "Failed to register send-codec.\n";
   }
 
-  void ConfigAcm(const AcmConfig& config) {
-    ASSERT_EQ(0, acm_a_->SetVAD(config.dtx, config.dtx, VADAggr));
-    ASSERT_EQ(0, acm_a_->SetFECStatus(config.fec));
+  void ConfigAcm(const AcmSettings& config) {
+    ASSERT_EQ(0, acm_a_->SetVAD(config.dtx, config.dtx, VADAggr)) <<
+        "Failed to set VAD.\n";
+    ASSERT_EQ(0, acm_a_->SetFECStatus(config.fec)) <<
+        "Failed to set FEC.\n";
   }
 
   void ConfigChannel(bool packet_loss) {
@@ -230,19 +241,39 @@ class DelayTest {
   int encoding_sample_rate_hz_;
 };
 
-void RunTest() {
-  Config config;
-  strcpy(config.codec.name, FLAGS_codec.c_str());
-  config.codec.sample_rate_hz = FLAGS_sample_rate_hz;
-  config.codec.num_channels = FLAGS_num_channels;
-  config.acm.dtx = FLAGS_dtx;
-  config.acm.fec = false;
-  config.packet_loss = false;
-
-  DelayTest delay_test;
-  delay_test.SetUp();
-  delay_test.Perform(&config, 1, 240, "delay_test");
-  delay_test.TearDown();
-}
-}  // namespace
 }  // namespace webrtc
+
+int main(int argc, char* argv[]) {
+  google::ParseCommandLineFlags(&argc, &argv, true);
+  webrtc::Config config;
+  webrtc::TestSettings test_setting;
+  strcpy(test_setting.codec.name, FLAGS_codec.c_str());
+
+  if (FLAGS_sample_rate_hz != 8000 &&
+      FLAGS_sample_rate_hz != 16000 &&
+      FLAGS_sample_rate_hz != 32000 &&
+      FLAGS_sample_rate_hz != 48000) {
+    std::cout << "Invalid sampling rate.\n";
+    return 1;
+  }
+  test_setting.codec.sample_rate_hz = FLAGS_sample_rate_hz;
+  if (FLAGS_num_channels < 1 || FLAGS_num_channels > 2) {
+    std::cout << "Only mono and stereo are supported.\n";
+    return 1;
+  }
+  test_setting.codec.num_channels = FLAGS_num_channels;
+  test_setting.acm.dtx = FLAGS_dtx;
+  test_setting.acm.fec = FLAGS_fec;
+  test_setting.packet_loss = FLAGS_packet_loss;
+
+  if (FLAGS_acm2) {
+    webrtc::UseNewAcm(&config);
+  } else {
+    webrtc::UseLegacyAcm(&config);
+  }
+
+  webrtc::DelayTest delay_test(config);
+  delay_test.Initialize();
+  delay_test.Perform(&test_setting, 1, 240, "delay_test");
+  return 0;
+}
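With its own main(), the rewritten delay_test runs as a standalone binary rather than under the gtest runner. A hypothetical invocation combining the flags defined above (the values are illustrative only):

//   ./delay_test --codec=isac --sample_rate_hz=16000 --num_channels=1 \
//       --delay=100 --init_delay=0 --acm2 --fec --packet_loss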
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc
index 85b1c8ef071d..ba9bb6cb3acb 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc
@@ -8,25 +8,35 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "../acm2/acm_common_defs.h"
 #include "gtest/gtest.h"
-#include "audio_coding_module.h"
-#include "PCMFile.h"
-#include "module_common_types.h"
-#include "scoped_ptr.h"
-#include "typedefs.h"
+#include "webrtc/common.h"
+#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/main/test/utility.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
 #include "webrtc/test/testsupport/fileutils.h"
 #include "webrtc/test/testsupport/gtest_disable.h"
 
 namespace webrtc {
 
-class DualStreamTest :
-    public AudioPacketizationCallback,
-    public ::testing::Test {
- protected:
-  DualStreamTest();
+class DualStreamTest : public AudioPacketizationCallback {
+ public:
+  explicit DualStreamTest(const Config& config);
   ~DualStreamTest();
 
+  void RunTest(int frame_size_primary_samples,
+               int num_channels_primary,
+               int sampling_rate,
+               bool start_in_sync,
+               int num_channels_input);
+
+  void ApiTest();
+
+ protected:
+
   int32_t SendData(FrameType frameType, uint8_t payload_type,
                    uint32_t timestamp, const uint8_t* payload_data,
                    uint16_t payload_size,
@@ -83,10 +93,10 @@
   bool received_payload_[kMaxNumStreams];
 };
 
-DualStreamTest::DualStreamTest()
-    : acm_dual_stream_(AudioCodingModule::Create(0)),
-      acm_ref_primary_(AudioCodingModule::Create(1)),
-      acm_ref_secondary_(AudioCodingModule::Create(2)),
+DualStreamTest::DualStreamTest(const Config& config)
+    : acm_dual_stream_(config.Get().Create(0)),
+      acm_ref_primary_(config.Get().Create(1)),
+      acm_ref_secondary_(config.Get().Create(2)),
       payload_ref_is_stored_(),
       payload_dual_is_stored_(),
       timestamp_ref_(),
@@ -94,11 +104,9 @@
       num_received_payloads_ref_(),
       num_compared_payloads_(),
       last_timestamp_(),
-      received_payload_() {
-}
+      received_payload_() {}
 
-DualStreamTest::~DualStreamTest() {
-}
+DualStreamTest::~DualStreamTest() {}
 
 void DualStreamTest::PopulateCodecInstances(int frame_size_primary_ms,
                                             int num_channels_primary,
@@ -380,106 +388,17 @@ int32_t DualStreamTest::SendData(FrameType frameType, uint8_t payload_type,
   return 0;
 }
 
-// Mono input, mono primary WB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncMonoInputMonoPrimaryWb20Ms)) {
-  InitializeSender(20, 1, 16000);
-  Perform(true, 1);
-}
+void DualStreamTest::RunTest(int frame_size_primary_samples,
+                             int num_channels_primary,
+                             int sampling_rate,
+                             bool start_in_sync,
+                             int num_channels_input) {
+  InitializeSender(
+      frame_size_primary_samples, num_channels_primary, sampling_rate);
+  Perform(start_in_sync, num_channels_input);
+};
 
-// Mono input, stereo primary WB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncMonoInput_StereoPrimaryWb20Ms)) {
-  InitializeSender(20, 2, 16000);
-  Perform(true, 1);
-}
-
-// Mono input, mono primary SWB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncMonoInputMonoPrimarySwb20Ms)) {
-  InitializeSender(20, 1, 32000);
-  Perform(true, 1);
-}
-
-// Mono input, stereo primary SWB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncMonoInputStereoPrimarySwb20Ms)) {
-  InitializeSender(20, 2, 32000);
-  Perform(true, 1);
-}
-
-// Mono input, mono primary WB 40 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncMonoInputMonoPrimaryWb40Ms)) {
-  InitializeSender(40, 1, 16000);
-  Perform(true, 1);
-}
-
-// Mono input, stereo primary WB 40 ms frame
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncMonoInputStereoPrimaryWb40Ms)) {
-  InitializeSender(40, 2, 16000);
-  Perform(true, 1);
-}
-
-// Stereo input, mono primary WB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncStereoInputMonoPrimaryWb20Ms)) {
-  InitializeSender(20, 1, 16000);
-  Perform(true, 2);
-}
-
-// Stereo input, stereo primary WB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncStereoInputStereoPrimaryWb20Ms)) {
-  InitializeSender(20, 2, 16000);
-  Perform(true, 2);
-}
-
-// Stereo input, mono primary SWB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncStereoInputMonoPrimarySwb20Ms)) {
-  InitializeSender(20, 1, 32000);
-  Perform(true, 2);
-}
-
-// Stereo input, stereo primary SWB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncStereoInputStereoPrimarySwb20Ms)) {
-  InitializeSender(20, 2, 32000);
-  Perform(true, 2);
-}
-
-// Stereo input, mono primary WB 40 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncStereoInputMonoPrimaryWb40Ms)) {
-  InitializeSender(40, 1, 16000);
-  Perform(true, 2);
-}
-
-// Stereo input, stereo primary WB 40 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactSyncStereoInputStereoPrimaryWb40Ms)) {
-  InitializeSender(40, 2, 16000);
-  Perform(true, 2);
-}
-
-// Asynchronous test, ACM is fed with data then the secondary coder is
-// registered. Mono input, mono primary WB 20 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactAsyncMonoInputMonoPrimaryWb20Ms)) {
-  InitializeSender(20, 1, 16000);
-  Perform(false, 1);
-}
-
-// Mono input, mono primary WB 40 ms frame.
-TEST_F(DualStreamTest,
-       DISABLED_ON_ANDROID(BitExactAsyncMonoInputMonoPrimaryWb40Ms)) {
-  InitializeSender(40, 1, 16000);
-  Perform(false, 1);
-}
-
-TEST_F(DualStreamTest, DISABLED_ON_ANDROID(Api)) {
+void DualStreamTest::ApiTest() {
   PopulateCodecInstances(20, 1, 16000);
   CodecInst my_codec;
   ASSERT_EQ(0, acm_dual_stream_->InitializeSender());
@@ -530,5 +449,171 @@ TEST_F(DualStreamTest, DISABLED_ON_ANDROID(Api)) {
   EXPECT_EQ(VADVeryAggr, vad_mode);
 }
 
+namespace {
+
+DualStreamTest* CreateLegacy() {
+  Config config;
+  UseLegacyAcm(&config);
+  DualStreamTest* test = new DualStreamTest(config);
+  return test;
+}
-  // namespace webrtc
+
+DualStreamTest* CreateNew() {
+  Config config;
+  UseNewAcm(&config);
+  DualStreamTest* test = new DualStreamTest(config);
+  return test;
+}
+
+}  // namespace
+
+// Mono input, mono primary WB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncMonoInputMonoPrimaryWb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 1, 16000, true, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 1, 16000, true, 1);
+}
+
+// Mono input, stereo primary WB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncMonoInput_StereoPrimaryWb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 2, 16000, true, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 2, 16000, true, 1);
+}
+
+// Mono input, mono primary SWB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncMonoInputMonoPrimarySwb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 1, 32000, true, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 1, 32000, true, 1);
+}
+
+// Mono input, stereo primary SWB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncMonoInputStereoPrimarySwb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 2, 32000, true, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 2, 32000, true, 1);
+}
+
+// Mono input, mono primary WB 40 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncMonoInputMonoPrimaryWb40Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(40, 1, 16000, true, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(40, 1, 16000, true, 1);
+}
+
+// Mono input, stereo primary WB 40 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncMonoInputStereoPrimaryWb40Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(40, 2, 16000, true, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(40, 2, 16000, true, 1);
+}
+
+// Stereo input, mono primary WB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncStereoInputMonoPrimaryWb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 1, 16000, true, 2);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 1, 16000, true, 2);
+}
+
+// Stereo input, stereo primary WB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncStereoInputStereoPrimaryWb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 2, 16000, true, 2);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 2, 16000, true, 2);
+}
+
+// Stereo input, mono primary SWB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncStereoInputMonoPrimarySwb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 1, 32000, true, 2);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 1, 32000, true, 2);
+}
+
+// Stereo input, stereo primary SWB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncStereoInputStereoPrimarySwb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 2, 32000, true, 2);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 2, 32000, true, 2);
+}
+
+// Stereo input, mono primary WB 40 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncStereoInputMonoPrimaryWb40Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(40, 1, 16000, true, 2);
+
+  test.reset(CreateNew());
+  test->RunTest(40, 1, 16000, true, 2);
+}
+
+// Stereo input, stereo primary WB 40 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactSyncStereoInputStereoPrimaryWb40Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(40, 2, 16000, true, 2);
+
+  test.reset(CreateNew());
+  test->RunTest(40, 2, 16000, true, 2);
+}
+
+// Asynchronous test, ACM is fed with data then secondary coder is registered.
+// Mono input, mono primary WB 20 ms frame.
+TEST(DualStreamTest,
+     DISABLED_ON_ANDROID(BitExactAsyncMonoInputMonoPrimaryWb20Ms)) {
+  scoped_ptr<DualStreamTest> test(CreateLegacy());
+  test->RunTest(20, 1, 16000, false, 1);
+
+  test.reset(CreateNew());
+  test->RunTest(20, 1, 16000, false, 1);
+}
+
+// Mono input, mono primary WB 40 ms frame.
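The same ownership cleanup recurs in the iSACTest changes below: raw Channel and AudioCodingModule members plus manual delete become scoped_ptr members with empty destructors. The idiom in miniature, with std::unique_ptr standing in for webrtc's scoped_ptr:

#include <memory>

struct Channel {};

// Before: manual lifetime management.
class RawOwner {
 public:
  RawOwner() : channel_(0) {}
  ~RawOwner() { delete channel_; }          // easy to forget or double-free
  void Setup() { channel_ = new Channel; }
 private:
  Channel* channel_;
};

// After: reset() installs the object, destruction is automatic, and the
// destructor body can stay empty, as in ISACTest::~ISACTest().
class SmartOwner {
 public:
  void Setup() { channel_.reset(new Channel); }
 private:
  std::unique_ptr<Channel> channel_;
};

int main() {
  RawOwner a; a.Setup();
  SmartOwner b; b.Setup();
  return 0;
}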
+TEST(DualStreamTest, + DISABLED_ON_ANDROID(BitExactAsyncMonoInputMonoPrimaryWb40Ms)) { + scoped_ptr test(CreateLegacy()); + test->RunTest(40, 1, 16000, false, 1); + + test.reset(CreateNew()); + test->RunTest(40, 1, 16000, false, 1); +} + +TEST(DualStreamTest, DISABLED_ON_ANDROID(ApiTest)) { + scoped_ptr test(CreateLegacy()); + test->ApiTest(); + + test.reset(CreateNew()); + test->ApiTest(); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.cc index 26f5b1f02aef..f7fef4a80afc 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.cc @@ -86,16 +86,13 @@ int16_t SetISAConfig(ACMTestISACConfig& isacConfig, AudioCodingModule* acm, return 0; } -ISACTest::ISACTest(int testMode) - : _acmA(AudioCodingModule::Create(1)), - _acmB(AudioCodingModule::Create(2)), +ISACTest::ISACTest(int testMode, const Config& config) + : _acmA(config.Get().Create(1)), + _acmB(config.Get().Create(2)), _testMode(testMode) { } -ISACTest::~ISACTest() { - delete _channel_A2B; - delete _channel_B2A; -} +ISACTest::~ISACTest() {} void ISACTest::Setup() { int codecCntr; @@ -123,14 +120,14 @@ void ISACTest::Setup() { EXPECT_EQ(0, _acmB->RegisterReceiveCodec(_paramISAC32kHz)); //--- Set A-to-B channel - _channel_A2B = new Channel; - EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B)); - _channel_A2B->RegisterReceiverACM(_acmB); + _channel_A2B.reset(new Channel); + EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B.get())); + _channel_A2B->RegisterReceiverACM(_acmB.get()); //--- Set B-to-A channel - _channel_B2A = new Channel; - EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A)); - _channel_B2A->RegisterReceiverACM(_acmA); + _channel_B2A.reset(new Channel); + EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A.get())); + _channel_B2A->RegisterReceiverACM(_acmA.get()); file_name_swb_ = webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); @@ -284,8 +281,8 @@ void ISACTest::EncodeDecode(int testNr, ACMTestISACConfig& wbISACConfig, EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz)); // Side A is sending super-wideband, and side B is sending wideband. - SetISAConfig(swbISACConfig, _acmA, _testMode); - SetISAConfig(wbISACConfig, _acmB, _testMode); + SetISAConfig(swbISACConfig, _acmA.get(), _testMode); + SetISAConfig(wbISACConfig, _acmB.get(), _testMode); bool adaptiveMode = false; if ((swbISACConfig.currentRateBitPerSec == -1) diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.h index 3c4ca5facea1..d9563dbd9458 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/iSACTest.h @@ -8,23 +8,27 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef ACM_ISAC_TEST_H -#define ACM_ISAC_TEST_H +#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ISACTEST_H_ +#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ISACTEST_H_ #include -#include "ACMTest.h" -#include "Channel.h" -#include "PCMFile.h" -#include "audio_coding_module.h" -#include "utility.h" -#include "common_types.h" +#include "webrtc/common.h" +#include "webrtc/common_types.h" +#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/audio_coding/main/test/ACMTest.h" +#include "webrtc/modules/audio_coding/main/test/Channel.h" +#include "webrtc/modules/audio_coding/main/test/PCMFile.h" +#include "webrtc/modules/audio_coding/main/test/utility.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" #define MAX_FILE_NAME_LENGTH_BYTE 500 #define NO_OF_CLIENTS 15 namespace webrtc { +class Config; + struct ACMTestISACConfig { int32_t currentRateBitPerSec; int16_t currentFrameSizeMsec; @@ -38,7 +42,7 @@ struct ACMTestISACConfig { class ISACTest : public ACMTest { public: - ISACTest(int testMode); + ISACTest(int testMode, const Config& config); ~ISACTest(); void Perform(); @@ -52,11 +56,11 @@ class ISACTest : public ACMTest { void SwitchingSamplingRate(int testNr, int maxSampRateChange); - AudioCodingModule* _acmA; - AudioCodingModule* _acmB; + scoped_ptr _acmA; + scoped_ptr _acmB; - Channel* _channel_A2B; - Channel* _channel_B2A; + scoped_ptr _channel_A2B; + scoped_ptr _channel_B2A; PCMFile _inFileA; PCMFile _inFileB; @@ -77,4 +81,4 @@ class ISACTest : public ACMTest { } // namespace webrtc -#endif +#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ISACTEST_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc index d1a977602cea..b1892399818d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc @@ -16,6 +16,7 @@ #include #include "gtest/gtest.h" +#include "webrtc/common.h" #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h" @@ -30,6 +31,7 @@ namespace webrtc { namespace { + double FrameRms(AudioFrame& frame) { int samples = frame.num_channels_ * frame.samples_per_channel_; double rms = 0; @@ -42,19 +44,14 @@ double FrameRms(AudioFrame& frame) { } -class InitialPlayoutDelayTest : public ::testing::Test { - protected: - - InitialPlayoutDelayTest() - : acm_a_(AudioCodingModule::Create(0)), - acm_b_(AudioCodingModule::Create(1)), - channel_a2b_(NULL) { - } +class InitialPlayoutDelayTest { + public: + explicit InitialPlayoutDelayTest(const Config& config) + : acm_a_(config.Get().Create(0)), + acm_b_(config.Get().Create(1)), + channel_a2b_(NULL) {} ~InitialPlayoutDelayTest() { - } - - void TearDown() { if (channel_a2b_ != NULL) { delete channel_a2b_; channel_a2b_ = NULL; @@ -62,8 +59,11 @@ class InitialPlayoutDelayTest : public ::testing::Test { } void SetUp() { - acm_b_->InitializeReceiver(); - acm_a_->InitializeReceiver(); + ASSERT_TRUE(acm_a_.get() != NULL); + ASSERT_TRUE(acm_b_.get() != NULL); + + EXPECT_EQ(0, acm_b_->InitializeReceiver()); + EXPECT_EQ(0, acm_a_->InitializeReceiver()); // Register all L16 codecs in receiver. 
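The receiver setup that follows loops over every L16 variant. The shape of that loop, sketched standalone; CodecByParams is a hypothetical stand-in for AudioCodingModule::Codec, which fills a CodecInst from a name, sampling rate and channel count.

#include <cstdio>

struct CodecInst {
  int rate;
  int channels;
};

// Hypothetical stand-in for AudioCodingModule::Codec("L16", &codec, ...).
bool CodecByParams(int rate, int channels, CodecInst* out) {
  out->rate = rate;
  out->channels = channels;
  return true;
}

int main() {
  // L16 at 8/16/32 kHz, mono and stereo: six receive codecs in total.
  const int rates[3] = {8000, 16000, 32000};
  for (int r = 0; r < 3; ++r) {
    for (int ch = 1; ch <= 2; ++ch) {
      CodecInst codec;
      if (CodecByParams(rates[r], ch, &codec))
        std::printf("register L16 %d Hz x%d\n", codec.rate, codec.channels);
    }
  }
  return 0;
}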
CodecInst codec; @@ -82,6 +82,45 @@ class InitialPlayoutDelayTest : public ::testing::Test { channel_a2b_->RegisterReceiverACM(acm_b_.get()); } + void NbMono() { + CodecInst codec; + AudioCodingModule::Codec("L16", &codec, 8000, 1); + Run(codec, 2000); + } + + void WbMono() { + CodecInst codec; + AudioCodingModule::Codec("L16", &codec, 16000, 1); + Run(codec, 2000); + } + + void SwbMono() { + CodecInst codec; + AudioCodingModule::Codec("L16", &codec, 32000, 1); + Run(codec, 1500); // NetEq buffer is not sufficiently large for 3 sec of + // PCM16 super-wideband. + } + + void NbStereo() { + CodecInst codec; + AudioCodingModule::Codec("L16", &codec, 8000, 2); + Run(codec, 2000); + } + + void WbStereo() { + CodecInst codec; + AudioCodingModule::Codec("L16", &codec, 16000, 2); + Run(codec, 1500); + } + + void SwbStereo() { + CodecInst codec; + AudioCodingModule::Codec("L16", &codec, 32000, 2); + Run(codec, 600); // NetEq buffer is not sufficiently large for 3 sec of + // PCM16 super-wideband. + } + + private: void Run(CodecInst codec, int initial_delay_ms) { AudioFrame in_audio_frame; AudioFrame out_audio_frame; @@ -119,43 +158,72 @@ class InitialPlayoutDelayTest : public ::testing::Test { Channel* channel_a2b_; }; -TEST_F( InitialPlayoutDelayTest, NbMono) { - CodecInst codec; - AudioCodingModule::Codec("L16", &codec, 8000, 1); - Run(codec, 3000); +namespace { + +InitialPlayoutDelayTest* CreateLegacy() { + Config config; + UseLegacyAcm(&config); + InitialPlayoutDelayTest* test = new InitialPlayoutDelayTest(config); + test->SetUp(); + return test; } -TEST_F( InitialPlayoutDelayTest, WbMono) { - CodecInst codec; - AudioCodingModule::Codec("L16", &codec, 16000, 1); - Run(codec, 3000); +InitialPlayoutDelayTest* CreateNew() { + Config config; + UseNewAcm(&config); + InitialPlayoutDelayTest* test = new InitialPlayoutDelayTest(config); + test->SetUp(); + return test; } -TEST_F( InitialPlayoutDelayTest, SwbMono) { - CodecInst codec; - AudioCodingModule::Codec("L16", &codec, 32000, 1); - Run(codec, 2000); // NetEq buffer is not sufficiently large for 3 sec of - // PCM16 super-wideband. +} // namespace + +TEST(InitialPlayoutDelayTest, NbMono) { + scoped_ptr test(CreateLegacy()); + test->NbMono(); + + test.reset(CreateNew()); + test->NbMono(); } -TEST_F( InitialPlayoutDelayTest, NbStereo) { - CodecInst codec; - AudioCodingModule::Codec("L16", &codec, 8000, 2); - Run(codec, 3000); +TEST(InitialPlayoutDelayTest, WbMono) { + scoped_ptr test(CreateLegacy()); + test->WbMono(); + + test.reset(CreateNew()); + test->WbMono(); } -TEST_F( InitialPlayoutDelayTest, WbStereo) { - CodecInst codec; - AudioCodingModule::Codec("L16", &codec, 16000, 2); - Run(codec, 3000); +TEST(InitialPlayoutDelayTest, SwbMono) { + scoped_ptr test(CreateLegacy()); + test->SwbMono(); + + test.reset(CreateNew()); + test->SwbMono(); } -TEST_F( InitialPlayoutDelayTest, SwbStereo) { - CodecInst codec; - AudioCodingModule::Codec("L16", &codec, 32000, 2); - Run(codec, 2000); // NetEq buffer is not sufficiently large for 3 sec of - // PCM16 super-wideband. 
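The shrinking delay arguments (2000 ms down to 600 ms) track raw PCM16 bandwidth: the NetEq packet buffer holds a bounded amount of data, and higher rates and stereo fill it faster. A back-of-envelope check; the exact buffer limit is not stated in this file, so the numbers only show the relative growth.

#include <cstdio>

int main() {
  const int kBytesPerSample = 2;  // PCM16
  const int rates[3] = {8000, 16000, 32000};
  for (int r = 0; r < 3; ++r)
    for (int ch = 1; ch <= 2; ++ch)
      std::printf("%5d Hz x%d: %6d bytes/s\n", rates[r], ch,
                  rates[r] * ch * kBytesPerSample);
  // 32 kHz stereo is 8x the 8 kHz mono rate, hence the much shorter
  // initial delay (600 ms) that still fits the same buffer.
  return 0;
}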
+TEST(InitialPlayoutDelayTest, NbStereo) { + scoped_ptr test(CreateLegacy()); + test->NbStereo(); + + test.reset(CreateNew()); + test->NbStereo(); } +TEST(InitialPlayoutDelayTest, WbStereo) { + scoped_ptr test(CreateLegacy()); + test->WbStereo(); + + test.reset(CreateNew()); + test->WbStereo(); } - // namespace webrtc + +TEST(InitialPlayoutDelayTest, SwbStereo) { + scoped_ptr test(CreateLegacy()); + test->SwbStereo(); + + test.reset(CreateNew()); + test->SwbStereo(); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.cc index 51169340b829..3c9adb701930 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.cc @@ -15,6 +15,7 @@ #include #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/common.h" // Config. #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" #include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h" @@ -28,8 +29,8 @@ namespace webrtc { -OpusTest::OpusTest() - : acm_receiver_(AudioCodingModule::Create(0)), +OpusTest::OpusTest(const Config& config) + : acm_receiver_(config.Get().Create(0)), channel_a2b_(NULL), counter_(0), payload_type_(255), @@ -218,6 +219,7 @@ void OpusTest::Run(TestPackStereo* channel, int channels, int bitrate, int written_samples = 0; int read_samples = 0; int decoded_samples = 0; + channel->reset_payload_size(); counter_ = 0; @@ -225,7 +227,8 @@ void OpusTest::Run(TestPackStereo* channel, int channels, int bitrate, EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_mono_encoder_, bitrate)); EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_stereo_encoder_, bitrate)); - while (1) { + // Make sure the runtime is less than 60 seconds to pass Android test. + for (size_t audio_length = 0; audio_length < 10000; audio_length += 10) { bool lost_packet = false; // Get 10 msec of audio. @@ -321,7 +324,7 @@ void OpusTest::Run(TestPackStereo* channel, int channels, int bitrate, } // Run received side of ACM. - CHECK_ERROR(acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame)); + ASSERT_EQ(0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame)); // Write output speech to file. 
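The bounded loop introduced in OpusTest::Run above replaces while (1): it advances simulated time 10 ms per iteration up to 10000 ms, i.e. 10 seconds of audio, which keeps the test inside the 60-second Android budget the comment cites. The iteration count, checked directly:

#include <cassert>

int main() {
  int iterations = 0;
  for (int audio_length = 0; audio_length < 10000; audio_length += 10)
    ++iterations;
  assert(iterations == 1000);  // 1000 x 10 ms == 10 s of simulated audio
  return 0;
}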
out_file_.Write10MsData( diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.h index 49b98ea86280..08dce98a1eeb 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/opus_test.h @@ -23,9 +23,11 @@ namespace webrtc { +class Config; + class OpusTest : public ACMTest { public: - OpusTest(); + explicit OpusTest(const Config& config); ~OpusTest(); void Perform(); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc index 9d23ec6f583d..f01e6ffba9fc 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc @@ -9,8 +9,11 @@ */ #include "gtest/gtest.h" +#include "webrtc/common.h" #include "webrtc/common_types.h" +#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" +#include "webrtc/modules/audio_coding/main/test/utility.h" #include "webrtc/modules/interface/module_common_types.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" #include "webrtc/system_wrappers/interface/sleep.h" @@ -18,22 +21,14 @@ #include "webrtc/test/testsupport/gtest_disable.h" namespace webrtc { -class TargetDelayTest : public ::testing::Test { - protected: - static const int kSampleRateHz = 16000; - static const int kNum10msPerFrame = 2; - static const int kFrameSizeSamples = 320; // 20 ms @ 16 kHz. - // payload-len = frame-samples * 2 bytes/sample. - static const int kPayloadLenBytes = 320 * 2; - // Inter-arrival time in number of packets in a jittery channel. One is no - // jitter. - static const int kInterarrivalJitterPacket = 2; - TargetDelayTest() - : acm_(AudioCodingModule::Create(0)) {} - ~TargetDelayTest() { - } +class TargetDelayTest { + public: + explicit TargetDelayTest(const Config& config) + : acm_(config.Get().Create(0)) {} + + ~TargetDelayTest() {} void SetUp() { EXPECT_TRUE(acm_.get() != NULL); @@ -51,13 +46,107 @@ class TargetDelayTest : public ::testing::Test { rtp_info_.type.Audio.channel = 1; rtp_info_.type.Audio.isCNG = false; rtp_info_.frameType = kAudioFrameSpeech; + + int16_t audio[kFrameSizeSamples]; + const int kRange = 0x7FF; // 2047, easy for masking. + for (int n = 0; n < kFrameSizeSamples; ++n) + audio[n] = (rand() & kRange) - kRange / 2; + WebRtcPcm16b_Encode(audio, kFrameSizeSamples, payload_); } + void OutOfRangeInput() { + EXPECT_EQ(-1, SetMinimumDelay(-1)); + EXPECT_EQ(-1, SetMinimumDelay(10001)); + } + + void NoTargetDelayBufferSizeChanges() { + for (int n = 0; n < 30; ++n) // Run enough iterations. + Run(true); + int clean_optimal_delay = GetCurrentOptimalDelayMs(); + Run(false); // Run with jitter. + int jittery_optimal_delay = GetCurrentOptimalDelayMs(); + EXPECT_GT(jittery_optimal_delay, clean_optimal_delay); + int required_delay = RequiredDelay(); + EXPECT_GT(required_delay, 0); + EXPECT_NEAR(required_delay, jittery_optimal_delay, 1); + } + + void WithTargetDelayBufferNotChanging() { + // A target delay that is one packet larger than jitter. + const int kTargetDelayMs = (kInterarrivalJitterPacket + 1) * + kNum10msPerFrame * 10; + ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs)); + for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer. 
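The SetUp change above builds one valid PCM16-B payload up front instead of pushing uninitialized bytes on every call. The masking arithmetic in isolation; WebRtcPcm16b_Encode itself is omitted, only the sample generation is shown.

#include <cstdio>
#include <cstdlib>

int main() {
  const int kFrameSizeSamples = 320;  // 20 ms at 16 kHz
  const int kRange = 0x7FF;           // 2047, easy for masking
  short audio[kFrameSizeSamples];
  for (int n = 0; n < kFrameSizeSamples; ++n) {
    // rand() & 0x7FF is uniform in [0, 2047]; subtracting kRange / 2
    // recenters it to about [-1023, +1024], a low-level noise signal.
    audio[n] = static_cast<short>((std::rand() & kRange) - kRange / 2);
  }
  std::printf("first sample: %d\n", audio[0]);
  return 0;
}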
+ Run(true); + int clean_optimal_delay = GetCurrentOptimalDelayMs(); + EXPECT_EQ(kTargetDelayMs, clean_optimal_delay); + Run(false); // Run with jitter. + int jittery_optimal_delay = GetCurrentOptimalDelayMs(); + EXPECT_EQ(jittery_optimal_delay, clean_optimal_delay); + } + + void RequiredDelayAtCorrectRange() { + for (int n = 0; n < 30; ++n) // Run clean and store delay. + Run(true); + int clean_optimal_delay = GetCurrentOptimalDelayMs(); + + // A relatively large delay. + const int kTargetDelayMs = (kInterarrivalJitterPacket + 10) * + kNum10msPerFrame * 10; + ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs)); + for (int n = 0; n < 300; ++n) // Run enough iterations to fill the buffer. + Run(true); + Run(false); // Run with jitter. + + int jittery_optimal_delay = GetCurrentOptimalDelayMs(); + EXPECT_EQ(kTargetDelayMs, jittery_optimal_delay); + + int required_delay = RequiredDelay(); + + // Checking |required_delay| is in correct range. + EXPECT_GT(required_delay, 0); + EXPECT_GT(jittery_optimal_delay, required_delay); + EXPECT_GT(required_delay, clean_optimal_delay); + + // A tighter check for the value of |required_delay|. + // The jitter forces a delay of + // |kInterarrivalJitterPacket * kNum10msPerFrame * 10| milliseconds. So we + // expect |required_delay| be close to that. + EXPECT_NEAR(kInterarrivalJitterPacket * kNum10msPerFrame * 10, + required_delay, 1); + } + + void TargetDelayBufferMinMax() { + const int kTargetMinDelayMs = kNum10msPerFrame * 10; + ASSERT_EQ(0, SetMinimumDelay(kTargetMinDelayMs)); + for (int m = 0; m < 30; ++m) // Run enough iterations to fill the buffer. + Run(true); + int clean_optimal_delay = GetCurrentOptimalDelayMs(); + EXPECT_EQ(kTargetMinDelayMs, clean_optimal_delay); + + const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10); + ASSERT_EQ(0, SetMaximumDelay(kTargetMaxDelayMs)); + for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer. + Run(false); + + int capped_optimal_delay = GetCurrentOptimalDelayMs(); + EXPECT_EQ(kTargetMaxDelayMs, capped_optimal_delay); + } + + private: + static const int kSampleRateHz = 16000; + static const int kNum10msPerFrame = 2; + static const int kFrameSizeSamples = 320; // 20 ms @ 16 kHz. + // payload-len = frame-samples * 2 bytes/sample. + static const int kPayloadLenBytes = 320 * 2; + // Inter-arrival time in number of packets in a jittery channel. One is no + // jitter. + static const int kInterarrivalJitterPacket = 2; + void Push() { rtp_info_.header.timestamp += kFrameSizeSamples; rtp_info_.header.sequenceNumber++; - uint8_t payload[kPayloadLenBytes]; // Doesn't need to be initialized. - ASSERT_EQ(0, acm_->IncomingPacket(payload, kFrameSizeSamples * 2, + ASSERT_EQ(0, acm_->IncomingPacket(payload_, kFrameSizeSamples * 2, rtp_info_)); } @@ -110,85 +199,69 @@ class TargetDelayTest : public ::testing::Test { scoped_ptr acm_; WebRtcRTPHeader rtp_info_; + uint8_t payload_[kPayloadLenBytes]; }; -TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(OutOfRangeInput)) { - EXPECT_EQ(-1, SetMinimumDelay(-1)); - EXPECT_EQ(-1, SetMinimumDelay(10001)); + +namespace { + +TargetDelayTest* CreateLegacy() { + Config config; + UseLegacyAcm(&config); + TargetDelayTest* test = new TargetDelayTest(config); + test->SetUp(); + return test; } -TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(NoTargetDelayBufferSizeChanges)) { - for (int n = 0; n < 30; ++n) // Run enough iterations. - Run(true); - int clean_optimal_delay = GetCurrentOptimalDelayMs(); - Run(false); // Run with jitter. 
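The EXPECT_NEAR in RequiredDelayAtCorrectRange above encodes a concrete expectation: the injected jitter holds packets back by kInterarrivalJitterPacket packet slots, each slot being kNum10msPerFrame * 10 ms, so the required delay should settle near 2 * 20 = 40 ms. The arithmetic, spelled out:

#include <cassert>

int main() {
  const int kNum10msPerFrame = 2;           // 20 ms packets
  const int kInterarrivalJitterPacket = 2;  // one means no jitter
  const int expected_required_delay_ms =
      kInterarrivalJitterPacket * kNum10msPerFrame * 10;
  assert(expected_required_delay_ms == 40);  // checked with a 1 ms tolerance
  return 0;
}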
- int jittery_optimal_delay = GetCurrentOptimalDelayMs(); - EXPECT_GT(jittery_optimal_delay, clean_optimal_delay); - int required_delay = RequiredDelay(); - EXPECT_GT(required_delay, 0); - EXPECT_NEAR(required_delay, jittery_optimal_delay, 1); +TargetDelayTest* CreateNew() { + Config config; + UseNewAcm(&config); + TargetDelayTest* test = new TargetDelayTest(config); + test->SetUp(); + return test; } -TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(WithTargetDelayBufferNotChanging)) { - // A target delay that is one packet larger than jitter. - const int kTargetDelayMs = (kInterarrivalJitterPacket + 1) * - kNum10msPerFrame * 10; - ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs)); - for (int n = 0; n < 30; ++n) // Run enough iterations to fill up the buffer. - Run(true); - int clean_optimal_delay = GetCurrentOptimalDelayMs(); - EXPECT_EQ(kTargetDelayMs, clean_optimal_delay); - Run(false); // Run with jitter. - int jittery_optimal_delay = GetCurrentOptimalDelayMs(); - EXPECT_EQ(jittery_optimal_delay, clean_optimal_delay); +} // namespace + +TEST(TargetDelayTest, DISABLED_ON_ANDROID(OutOfRangeInput)) { + scoped_ptr test(CreateLegacy()); + test->OutOfRangeInput(); + + test.reset(CreateNew()); + test->OutOfRangeInput(); } -TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(RequiredDelayAtCorrectRange)) { - for (int n = 0; n < 30; ++n) // Run clean and store delay. - Run(true); - int clean_optimal_delay = GetCurrentOptimalDelayMs(); +TEST(TargetDelayTest, DISABLED_ON_ANDROID(NoTargetDelayBufferSizeChanges)) { + scoped_ptr test(CreateLegacy()); + test->NoTargetDelayBufferSizeChanges(); - // A relatively large delay. - const int kTargetDelayMs = (kInterarrivalJitterPacket + 10) * - kNum10msPerFrame * 10; - ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs)); - for (int n = 0; n < 300; ++n) // Run enough iterations to fill up the buffer. - Run(true); - Run(false); // Run with jitter. - - int jittery_optimal_delay = GetCurrentOptimalDelayMs(); - EXPECT_EQ(kTargetDelayMs, jittery_optimal_delay); - - int required_delay = RequiredDelay(); - - // Checking |required_delay| is in correct range. - EXPECT_GT(required_delay, 0); - EXPECT_GT(jittery_optimal_delay, required_delay); - EXPECT_GT(required_delay, clean_optimal_delay); - - // A tighter check for the value of |required_delay|. - // The jitter forces a delay of - // |kInterarrivalJitterPacket * kNum10msPerFrame * 10| milliseconds. So we - // expect |required_delay| be close to that. - EXPECT_NEAR(kInterarrivalJitterPacket * kNum10msPerFrame * 10, - required_delay, 1); + test.reset(CreateNew()); + test->NoTargetDelayBufferSizeChanges(); } -TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(TargetDelayBufferMinMax)) { - const int kTargetMinDelayMs = kNum10msPerFrame * 10; - ASSERT_EQ(0, SetMinimumDelay(kTargetMinDelayMs)); - for (int m = 0; m < 30; ++m) // Run enough iterations to fill up the buffer. - Run(true); - int clean_optimal_delay = GetCurrentOptimalDelayMs(); - EXPECT_EQ(kTargetMinDelayMs, clean_optimal_delay); +TEST(TargetDelayTest, DISABLED_ON_ANDROID(WithTargetDelayBufferNotChanging)) { + scoped_ptr test(CreateLegacy()); + test->WithTargetDelayBufferNotChanging(); - const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10); - ASSERT_EQ(0, SetMaximumDelay(kTargetMaxDelayMs)); - for (int n = 0; n < 30; ++n) // Run enough iterations to fill up the buffer. 
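TargetDelayBufferMinMax pins both ends of the same mechanism: with kNum10msPerFrame = 2 the floor is 20 ms and the cap is 40 ms, and the jittery run is expected to clamp at the cap. The two constants side by side:

#include <cassert>

int main() {
  const int kNum10msPerFrame = 2;
  const int kTargetMinDelayMs = kNum10msPerFrame * 10;        // 20 ms floor
  const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10);  // 40 ms cap
  assert(kTargetMinDelayMs == 20 && kTargetMaxDelayMs == 40);
  return 0;
}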
- Run(false); - - int capped_optimal_delay = GetCurrentOptimalDelayMs(); - EXPECT_EQ(kTargetMaxDelayMs, capped_optimal_delay); + test.reset(CreateNew()); + test->WithTargetDelayBufferNotChanging(); } -} // webrtc +TEST(TargetDelayTest, DISABLED_ON_ANDROID(RequiredDelayAtCorrectRange)) { + scoped_ptr test(CreateLegacy()); + test->RequiredDelayAtCorrectRange(); + + test.reset(CreateNew()); + test->RequiredDelayAtCorrectRange(); +} + +TEST(TargetDelayTest, DISABLED_ON_ANDROID(TargetDelayBufferMinMax)) { + scoped_ptr test(CreateLegacy()); + test->TargetDelayBufferMinMax(); + + test.reset(CreateNew()); + test->TargetDelayBufferMinMax(); +} + +} // namespace webrtc + diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.cc index 4b696402142a..d6441ac6baab 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.cc @@ -15,6 +15,7 @@ #include #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/common.h" #include "webrtc/common_types.h" #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h" #include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h" @@ -329,4 +330,14 @@ int32_t VADCallback::InFrameType(int16_t frameType) { return 0; } +void UseLegacyAcm(webrtc::Config* config) { + config->Set( + new webrtc::AudioCodingModuleFactory()); +} + +void UseNewAcm(webrtc::Config* config) { + config->Set( + new webrtc::NewAudioCodingModuleFactory()); +} + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.h b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.h index 13c3e2c2795c..038643b9344a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/test/utility.h @@ -143,6 +143,10 @@ class VADCallback : public ACMVADCallback { uint32_t _numFrameTypes[6]; }; +void UseLegacyAcm(webrtc::Config* config); + +void UseNewAcm(webrtc::Config* config); + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_UTILITY_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.c b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.c index a922448591b3..4dbd81ed6650 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.c @@ -28,6 +28,17 @@ extern FILE *delay_fid2; /* file pointer to delay log file */ #endif /* NETEQ_DELAY_LOGGING */ +// These two functions are copied from module_common_types.h, but adapted for C. 
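The two helpers introduced just below decide ordering with modular arithmetic rather than a plain comparison. A self-contained demonstration of why the half-range test survives wrap-around, which a plain '<' does not:

#include <cassert>
#include <stdint.h>

static int IsNewerSeq(uint16_t seq, uint16_t prev) {
  // Same logic as WebRtcNetEQ_IsNewerSequenceNumber defined below.
  return seq != prev && (uint16_t)(seq - prev) < 0x8000;
}

int main() {
  assert(IsNewerSeq(5, 3));        // ordinary case
  assert(IsNewerSeq(2, 0xFFFE));   // 2 is newer: the counter wrapped
  assert(!IsNewerSeq(0xFFFE, 2));  // and not the other way around
  // A naive 'seq > prev' would call 2 older than 0xFFFE at every wrap and
  // wrongly take the "wrong timestamp or sequence order" fallback in
  // WebRtcNetEQ_UpdateIatStatistics.
  return 0;
}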
+int WebRtcNetEQ_IsNewerSequenceNumber(uint16_t sequence_number, + uint16_t prev_sequence_number) { + return sequence_number != prev_sequence_number && + ((uint16_t) (sequence_number - prev_sequence_number)) < 0x8000; +} + +int WebRtcNetEQ_IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) { + return timestamp != prev_timestamp && + ((uint32_t) (timestamp - prev_timestamp)) < 0x80000000; +} int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen, uint16_t seqNumber, uint32_t timeStamp, @@ -55,7 +66,8 @@ int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen, /****************************/ /* Try calculating packet length from current and previous timestamps */ - if ((timeStamp <= inst->lastTimeStamp) || (seqNumber <= inst->lastSeqNo)) + if (!WebRtcNetEQ_IsNewerTimestamp(timeStamp, inst->lastTimeStamp) || + !WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo)) { /* Wrong timestamp or sequence order; revert to backup plan */ packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */ @@ -68,7 +80,7 @@ int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen, } /* Check that the packet size is positive; if not, the statistics cannot be updated. */ - if (packetLenSamp > 0) + if (inst->firstPacketReceived && packetLenSamp > 0) { /* packet size ok */ /* calculate inter-arrival time in integer packets (rounding down) */ @@ -113,19 +125,19 @@ int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen, } /* end of streaming mode */ /* check for discontinuous packet sequence and re-ordering */ - if (seqNumber > inst->lastSeqNo + 1) + if (WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo + 1)) { /* Compensate for gap in the sequence numbers. * Reduce IAT with expected extra time due to lost packets, but ensure that * the IAT is not negative. 
*/ timeIat -= WEBRTC_SPL_MIN(timeIat, - (uint32_t) (seqNumber - inst->lastSeqNo - 1)); + (uint16_t) (seqNumber - (uint16_t) (inst->lastSeqNo + 1))); } - else if (seqNumber < inst->lastSeqNo) + else if (!WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo)) { /* compensate for re-ordering */ - timeIat += (uint32_t) (inst->lastSeqNo + 1 - seqNumber); + timeIat += (uint16_t) (inst->lastSeqNo + 1 - seqNumber); } /* saturate IAT at maximum value */ @@ -316,6 +328,8 @@ int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen, inst->lastTimeStamp = timeStamp; /* remember current timestamp */ + inst->firstPacketReceived = 1; + return retval; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.h index 16d72e8d563e..c5dd829b8347 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/automode.h @@ -80,6 +80,8 @@ typedef struct contained special information */ uint16_t lastSeqNo; /* sequence number for last packet received */ uint32_t lastTimeStamp; /* timestamp for the last packet received */ + int firstPacketReceived; /* set to zero implicitly when the instance is + filled with zeros */ int32_t sampleMemory; /* memory position for keeping track of how many samples we cut during expand */ int16_t prevTimeScale; /* indicates that the last mode was an accelerate diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_buffer.c b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_buffer.c index 9e3212646406..1788635c7dfb 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_buffer.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_buffer.c @@ -93,7 +93,7 @@ int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst, if (len == 4) { EventStart = encoded; -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN value=((*EventStart)>>8); endEvent=((*EventStart)&0x80)>>7; Volume=((*EventStart)&0x3F); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq.gypi index 31297ffe6cad..a8acfb3df24d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq.gypi @@ -21,10 +21,12 @@ ], 'include_dirs': [ 'interface', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'interface', + '<(webrtc_root)', ], }, 'sources': [ @@ -165,6 +167,7 @@ 'PCM16B', 'neteq_unittest_tools', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', + '<(webrtc_root)/test/test.gyp:test_support_main', ], 'sources': [ 'test/neteq_speed_test.cc', @@ -251,10 +254,10 @@ 'target_name': 'neteq_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'neteq_unittests', ], 'includes': [ + '../../../build/isolate.gypi', 'neteq_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h index 1f3b682ff56b..d6f30628dc49 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h @@ -306,6 +306,9 @@ #define NETEQ_G722_1C_CODEC #define NETEQ_CELT_CODEC + /* hack in 48 kHz support */ + #define NETEQ_48KHZ_WIDEBAND + /* Fullband 48 kHz codecs */ #define NETEQ_OPUS_CODEC 
#define NETEQ_ISAC_FB_CODEC @@ -323,7 +326,6 @@ #define NETEQ_PCM16B_CODEC #define NETEQ_G711_CODEC #define NETEQ_ILBC_CODEC - #define NETEQ_OPUS_CODEC #define NETEQ_G729_CODEC #define NETEQ_G726_CODEC #define NETEQ_GSMFR_CODEC @@ -332,7 +334,6 @@ /* Wideband codecs */ #define NETEQ_WIDEBAND #define NETEQ_ISAC_CODEC - /*#define NETEQ_OPUS_CODEC define only once */ #define NETEQ_G722_CODEC #define NETEQ_G722_1_CODEC #define NETEQ_G729_1_CODEC @@ -341,19 +342,14 @@ /* Super wideband 32kHz codecs */ #define NETEQ_ISAC_SWB_CODEC - /*#define NETEQ_OPUS_CODEC*/ #define NETEQ_32KHZ_WIDEBAND #define NETEQ_G722_1C_CODEC #define NETEQ_CELT_CODEC - /*#define NETEQ_OPUS_CODEC*/ - - /* hack in 48 kHz support */ - #define NETEQ_48KHZ_WIDEBAND /* Super wideband 48kHz codecs */ #define NETEQ_48KHZ_WIDEBAND - /*#define NETEQ_OPUS_CODEC*/ - #define NETEQ_ISAC_FB + #define NETEQ_OPUS_CODEC + #define NETEQ_ISAC_FB_CODEC #endif /* Max output size from decoding one frame */ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate index 78d805ccfe63..e8f4e482aaf5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate @@ -21,7 +21,6 @@ 'variables': { 'command': [ '../../../../testing/test_env.py', - '../../../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/neteq_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_touched': [ @@ -34,11 +33,11 @@ '../../../../resources/audio_coding/neteq_universal_ref.pcm', '../../../../resources/audio_coding/testfile32kHz.pcm', '../../../../testing/test_env.py', - '../../../../tools/swarm_client/run_isolated.py', - '../../../../tools/swarm_client/googletest/run_test_cases.py', - '../../../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/neteq_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/rtp.c b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/rtp.c index f23f3512b003..6ab5944b5aa7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/rtp.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/rtp.c @@ -31,7 +31,7 @@ int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen, return RTP_TOO_SHORT_PACKET; } -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */ i_P = (((uint16_t) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */ i_X = (((uint16_t) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */ @@ -62,7 +62,7 @@ int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen, i_padlength = ((pw16_Datagram[(i_DatagramLen >> 1) - 1]) & 0xFF); } } -#else /* WEBRTC_LITTLE_ENDIAN */ +#else /* WEBRTC_ARCH_LITTLE_ENDIAN */ i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */ i_P = (((uint16_t) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */ i_X = (((uint16_t) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */ @@ -126,7 +126,7 @@ int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads, int i_discardedBlockLength = 0; int singlePayload = 0; -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN if ((pw16_data[0] & 0x8000) == 0) { /* Only one payload in this 
packet*/ @@ -155,7 +155,7 @@ int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads, ((((uint16_t)pw16_data[1]) & 0xFC00) >> 10); i_blockLength = (((uint16_t)pw16_data[1]) & 0x3FF); } -#else /* WEBRTC_LITTLE_ENDIAN */ +#else /* WEBRTC_ARCH_LITTLE_ENDIAN */ if ((pw16_data[0] & 0x80) == 0) { /* Only one payload in this packet */ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc index 5c28d491d8bf..f663343ae698 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc @@ -31,116 +31,121 @@ int NETEQTEST_DummyRTPpacket::readFromFile(FILE *fp) uint16_t length, plen; uint32_t offset; + int packetLen; - if (fread(&length, 2, 1, fp) == 0) - { + bool readNextPacket = true; + while (readNextPacket) { + readNextPacket = false; + if (fread(&length, 2, 1, fp) == 0) + { reset(); return -2; - } - length = ntohs(length); + } + length = ntohs(length); - if (fread(&plen, 2, 1, fp) == 0) - { + if (fread(&plen, 2, 1, fp) == 0) + { reset(); return -1; - } - int packetLen = ntohs(plen); + } + packetLen = ntohs(plen); - if (fread(&offset, 4, 1, fp) == 0) - { + if (fread(&offset, 4, 1, fp) == 0) + { reset(); return -1; - } - // Store in local variable until we have passed the reset below. - uint32_t receiveTime = ntohl(offset); + } + // Store in local variable until we have passed the reset below. + uint32_t receiveTime = ntohl(offset); - // Use length here because a plen of 0 specifies rtcp. - length = (uint16_t) (length - _kRDHeaderLen); + // Use length here because a plen of 0 specifies rtcp. + length = (uint16_t) (length - _kRDHeaderLen); - // check buffer size - if (_datagram && _memSize < length + 1) - { + // check buffer size + if (_datagram && _memSize < length + 1) + { reset(); - } + } - if (!_datagram) - { + if (!_datagram) + { // Add one extra byte, to be able to fake a dummy payload of one byte. 
_datagram = new uint8_t[length + 1]; _memSize = length + 1; - } - memset(_datagram, 0, length + 1); + } + memset(_datagram, 0, length + 1); - if (length == 0) - { + if (length == 0) + { _datagramLen = 0; return packetLen; - } + } - // Read basic header - if (fread(_datagram, 1, _kBasicHeaderLen, fp) - != (size_t)_kBasicHeaderLen) - { + // Read basic header + if (fread(_datagram, 1, _kBasicHeaderLen, fp) + != (size_t)_kBasicHeaderLen) + { reset(); return -1; - } - _receiveTime = receiveTime; - _datagramLen = _kBasicHeaderLen; - int header_length = _kBasicHeaderLen; + } + _receiveTime = receiveTime; + _datagramLen = _kBasicHeaderLen; + int header_length = _kBasicHeaderLen; - // Parse the basic header - WebRtcNetEQ_RTPInfo tempRTPinfo; - int P, X, CC; - parseBasicHeader(&tempRTPinfo, &P, &X, &CC); + // Parse the basic header + WebRtcNetEQ_RTPInfo tempRTPinfo; + int P, X, CC; + parseBasicHeader(&tempRTPinfo, &P, &X, &CC); - // Check if we have to extend the header - if (X != 0 || CC != 0) - { + // Check if we have to extend the header + if (X != 0 || CC != 0) + { int newLen = _kBasicHeaderLen + CC * 4 + X * 4; assert(_memSize >= newLen + 1); // Read extension from file size_t readLen = newLen - _kBasicHeaderLen; if (fread(_datagram + _kBasicHeaderLen, 1, readLen, - fp) != readLen) + fp) != readLen) { - reset(); - return -1; + reset(); + return -1; } _datagramLen = newLen; header_length = newLen; if (X != 0) { - int totHdrLen = calcHeaderLength(X, CC); - assert(_memSize >= totHdrLen); + int totHdrLen = calcHeaderLength(X, CC); + assert(_memSize >= totHdrLen); - // Read extension from file - size_t readLen = totHdrLen - newLen; - if (fread(_datagram + newLen, 1, readLen, fp) - != readLen) - { - reset(); - return -1; - } - _datagramLen = totHdrLen; - header_length = totHdrLen; + // Read extension from file + size_t readLen = totHdrLen - newLen; + if (fread(_datagram + newLen, 1, readLen, fp) + != readLen) + { + reset(); + return -1; + } + _datagramLen = totHdrLen; + header_length = totHdrLen; } - } - // Make sure that we have at least one byte of dummy payload. - _datagramLen = std::max(static_cast(length), header_length + 1); - assert(_datagramLen <= _memSize); + } + // Make sure that we have at least one byte of dummy payload. + _datagramLen = std::max(static_cast(length), header_length + 1); + assert(_datagramLen <= _memSize); - if (!_blockList.empty() && _blockList.count(payloadType()) > 0) - { + if (!_blockList.empty() && _blockList.count(payloadType()) > 0) + { // discard this payload - return readFromFile(fp); - } + readNextPacket = true; + } - if (_filterSSRC && _selectSSRC != SSRC()) - { + if (_filterSSRC && _selectSSRC != SSRC()) + { // Discard this payload. 
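The surrounding rewrite turns readFromFile's tail-recursive "skip this packet" retries into a readNextPacket loop, so a long run of blocked or filtered packets costs no stack. The transformation in miniature:

#include <cstdio>

static int next_value() { static int v = 0; return v++; }
static bool should_skip(int v) { return v < 3; }

// Before: every skipped item adds a stack frame.
static int ReadRecursive() {
  int v = next_value();
  if (should_skip(v)) return ReadRecursive();
  return v;
}

// After: identical result, constant stack depth.
static int ReadLooping() {
  int v = 0;
  bool read_next = true;
  while (read_next) {
    read_next = false;
    v = next_value();
    if (should_skip(v)) read_next = true;
  }
  return v;
}

int main() {
  std::printf("%d %d\n", ReadRecursive(), ReadLooping());  // prints "3 4"
  return 0;
}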
- return(readFromFile(fp)); + readNextPacket = true; + } } return packetLen; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc index 49ce02d4f0ec..9eb20a91a0f8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc @@ -107,62 +107,66 @@ int NETEQTEST_RTPpacket::readFromFile(FILE *fp) uint16_t length, plen; uint32_t offset; + int packetLen; - if (fread(&length,2,1,fp)==0) - { - reset(); - return(-2); - } - length = ntohs(length); + bool readNextPacket = true; + while (readNextPacket) { + readNextPacket = false; + if (fread(&length,2,1,fp)==0) + { + reset(); + return(-2); + } + length = ntohs(length); - if (fread(&plen,2,1,fp)==0) - { - reset(); - return(-1); - } - int packetLen = ntohs(plen); + if (fread(&plen,2,1,fp)==0) + { + reset(); + return(-1); + } + packetLen = ntohs(plen); - if (fread(&offset,4,1,fp)==0) - { - reset(); - return(-1); - } - uint32_t receiveTime = ntohl(offset); // store in local variable until we have passed the reset below + if (fread(&offset,4,1,fp)==0) + { + reset(); + return(-1); + } + // store in local variable until we have passed the reset below + uint32_t receiveTime = ntohl(offset); - // Use length here because a plen of 0 specifies rtcp - length = (uint16_t) (length - _kRDHeaderLen); + // Use length here because a plen of 0 specifies rtcp + length = (uint16_t) (length - _kRDHeaderLen); - // check buffer size - if (_datagram && _memSize < length) - { - reset(); - } + // check buffer size + if (_datagram && _memSize < length) + { + reset(); + } - if (!_datagram) - { - _datagram = new uint8_t[length]; - _memSize = length; - } + if (!_datagram) + { + _datagram = new uint8_t[length]; + _memSize = length; + } - if (fread((unsigned short *) _datagram,1,length,fp) != length) - { - reset(); - return(-1); - } + if (fread((unsigned short *) _datagram,1,length,fp) != length) + { + reset(); + return(-1); + } - _datagramLen = length; - _receiveTime = receiveTime; + _datagramLen = length; + _receiveTime = receiveTime; - if (!_blockList.empty() && _blockList.count(payloadType()) > 0) - { - // discard this payload - return(readFromFile(fp)); - } + if (!_blockList.empty() && _blockList.count(payloadType()) > 0) + { + readNextPacket = true; + } - if (_filterSSRC && _selectSSRC != SSRC()) - { - // Discard this payload. 
- return(readFromFile(fp));
+ readNextPacket = true;
+ }
 }

 return(packetLen);
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
index bc1c85a20214..d1d0cb8ed138 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
@@ -57,7 +57,6 @@ if length(unique(s.ts(isfinite(s.ts)))) < length(s.ts(isfinite(s.ts)))
 s.playout_delay=s.playout_delay(ix);
 s.pt=s.pt(ix);
 s.optbuf=s.optbuf(ix);
- plen=plen(ix);
 s.decode=s.decode(ix);
 end
@@ -76,17 +75,30 @@ s.decode=s.decode(sort_ix);
 s.playout_delay=s.playout_delay(sort_ix);
 s.pt=s.pt(sort_ix);
-send_t=s.ts-s.ts(1);
+ts_unw = unwrap_ts(s.ts);
+unwrapped = any(ts_unw ~= s.ts);
+send_t = ts_unw - ts_unw(1);
+
 if length(s.fs)<1
 warning('No info about sample rate found in file. Using default 8000.');
 s.fs(1)=8000;
 s.fschange_ts(1)=min(s.ts);
-elseif s.fschange_ts(1)>min(s.ts)
- s.fschange_ts(1)=min(s.ts);
+elseif s.fschange_ts(1) ~= s.ts(1)
+ if ~unwrapped
+ s.fschange_ts(1) = s.ts(1);
+ else
+ error('TS wrapped, and sample rate change info is not found at the start of file => problem...')
+ end
 end
 end_ix=length(send_t);
 for k=length(s.fs):-1:1
+ if (k < length(s.fs) && s.fschange_ts(k) > s.fschange_ts(k+1))
+ % The sample rate changes are out of order, probably due to
+ % packet re-ordering.
+ warning('fschange_ts is out of order')
+ continue % Skip to the next one.
+ end
 start_ix=find(s.ts==s.fschange_ts(k));
 send_t(start_ix:end_ix)=send_t(start_ix:end_ix)/s.fs(k)*1000;
 s.playout_delay(start_ix:end_ix)=s.playout_delay(start_ix:end_ix)/s.fs(k)*1000;
@@ -142,12 +154,14 @@ use_ix = intersect(cng_ix,... % use those that are not CNG/SID frames...
 mean_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-send_t(use_ix));
 neteq_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-s.arrival(use_ix));
+max_neteq_delay = max(s.decode(use_ix)+s.playout_delay(use_ix)-s.arrival(use_ix));
 Npack=max(s.sn(delayskip_ix:end))-min(s.sn(delayskip_ix:end))+1;
 nw_lossrate=(Npack-length(s.sn(delayskip_ix:end)))/Npack;
 neteq_lossrate=(length(s.sn(delayskip_ix:end))-length(use_ix))/Npack;
 delay_struct=struct('mean_delay',mean_delay,'neteq_delay',neteq_delay,...
+ 'max_neteq_delay', max_neteq_delay,...
 'nw_lossrate',nw_lossrate,'neteq_lossrate',neteq_lossrate,...
 'tot_expand',round(s.tot_expand),'tot_accelerate',round(s.tot_accelerate),...
 'tot_preemptive',round(s.tot_preemptive),'tot_time',tot_time,...
@@ -160,7 +174,7 @@ if not(isempty(delaypoints))
 else
 delayvalues=[];
 end
-
+end
 % SUBFUNCTIONS %
@@ -179,9 +193,15 @@ while ~isempty(jumps)
 x(n+1:end)=x(n+1:end)-65536;
 end
- jumps=find(abs((diff(x(n+1:end))-1))>65000);
+ jumps=find(abs((diff(x)-1))>65000);
 end
 y=x;
-return;
+end
+
+function y = unwrap_ts(x)
+ max_u32 = 4294967295; % 0xFFFFFFFF
+ % Use the unwrap function made for unwrapping phase angle in radians.
+ y = round(max_u32 / (2*pi) * unwrap(x * 2*pi / max_u32)); +end diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq.c b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq.c index de1ccd1e3cba..fad690d08102 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq.c @@ -1104,14 +1104,6 @@ int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outp /* If CN or internal CNG */ *outputType = kOutputCNG; -#ifdef NETEQ_VAD - } - else if ( NetEqMainInst->DSPinst.VADInst.VADDecision == 0 ) - { - /* post-decode VAD says passive speaker */ - *outputType = kOutputVADPassive; -#endif /* NETEQ_VAD */ - } else if ((NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND) && (NetEqMainInst->DSPinst.ExpandInst.w16_expandMuteFactor == 0)) @@ -1125,6 +1117,14 @@ int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outp /* PLC mode */ *outputType = kOutputPLC; +#ifdef NETEQ_VAD + } + else if ( NetEqMainInst->DSPinst.VADInst.VADDecision == 0 ) + { + /* post-decode VAD says passive speaker */ + *outputType = kOutputVADPassive; +#endif /* NETEQ_VAD */ + } else { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc index 8f1c6baabaf4..c37f8990a8b3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc @@ -17,6 +17,7 @@ #include #include // memset +#include #include #include #include @@ -193,6 +194,8 @@ class NetEqDecodingTest : public ::testing::Test { WebRtcNetEQ_RTPInfo* rtp_info, uint8_t* payload, int* payload_len); + void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp, + const std::set& drop_seq_numbers); NETEQTEST_NetEQClass* neteq_inst_; std::vector dec_; @@ -505,7 +508,7 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) { WebRtcNetEQ_NetworkStatistics network_stats; ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(), &network_stats)); - EXPECT_EQ(-106911, network_stats.clockDriftPPM); + EXPECT_EQ(-103196, network_stats.clockDriftPPM); } TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) { @@ -536,7 +539,7 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) { WebRtcNetEQ_NetworkStatistics network_stats; ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(), &network_stats)); - EXPECT_EQ(108352, network_stats.clockDriftPPM); + EXPECT_EQ(110946, network_stats.clockDriftPPM); } TEST_F(NetEqDecodingTest, LongCngWithClockDrift) { @@ -700,4 +703,76 @@ TEST_F(NetEqDecodingTest, TestExtraDelay) { } } +void NetEqDecodingTest::WrapTest(uint16_t start_seq_no, + uint32_t start_timestamp, + const std::set& drop_seq_numbers) { + uint16_t seq_no = start_seq_no; + uint32_t timestamp = start_timestamp; + const int kFrameSizeMs = 30; + const int kSamples = kFrameSizeMs * 16; + const int kPayloadBytes = kSamples * 2; + double next_input_time_ms = 0.0; + + // Insert speech for 1 second. + const int kSpeechDurationMs = 1000; + for (double t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) { + // Each turn in this for loop is 10 ms. + while (next_input_time_ms <= t_ms) { + // Insert one 30 ms speech frame. 
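WrapTest deliberately starts its counters a few steps short of their maximum so the wrap happens within the first packets; a start of 0xFFFF - 5 wraps on the sixth insert. A standalone view of the sequence it produces:

#include <cstdio>
#include <stdint.h>

int main() {
  uint16_t seq_no = 0xFFFF - 5;
  for (int i = 0; i < 8; ++i)
    std::printf("%d ", (int)seq_no++);  // 65530 ... 65535 0 1
  std::printf("\n");
  // The drop-set variants later remove 0xFFFF and 0x0000, forcing the
  // decoder to also handle a loss that straddles the wrap itself.
  return 0;
}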
+ uint8_t payload[kPayloadBytes] = {0}; + WebRtcNetEQ_RTPInfo rtp_info; + PopulateRtpInfo(seq_no, timestamp, &rtp_info); + if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) { + // This sequence number was not in the set to drop. Insert it. + ASSERT_EQ(0, + WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), + &rtp_info, + payload, + kPayloadBytes, 0)); + } + ++seq_no; + timestamp += kSamples; + next_input_time_ms += static_cast(kFrameSizeMs); + WebRtcNetEQ_NetworkStatistics network_stats; + ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(), + &network_stats)); + // Expect preferred and actual buffer size to be no more than 2 frames. + EXPECT_LE(network_stats.preferredBufferSize, kFrameSizeMs * 2); + EXPECT_LE(network_stats.currentBufferSize, kFrameSizeMs * 2); + } + // Pull out data once. + ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_)); + // Expect delay (in samples) to be less than 2 packets. + EXPECT_LE(timestamp - neteq_inst_->getSpeechTimeStamp(), + static_cast(kSamples * 2)); + } +} + +TEST_F(NetEqDecodingTest, SequenceNumberWrap) { + // Start with a sequence number that will soon wrap. + std::set drop_seq_numbers; // Don't drop any packets. + WrapTest(0xFFFF - 5, 0, drop_seq_numbers); +} + +TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) { + // Start with a sequence number that will soon wrap. + std::set drop_seq_numbers; + drop_seq_numbers.insert(0xFFFF); + drop_seq_numbers.insert(0x0); + WrapTest(0xFFFF - 5, 0, drop_seq_numbers); +} + +TEST_F(NetEqDecodingTest, TimestampWrap) { + // Start with a timestamp that will soon wrap. + std::set drop_seq_numbers; + WrapTest(0, 0xFFFFFFFF - 1000, drop_seq_numbers); +} + +TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) { + // Start with a timestamp and a sequence number that will wrap at the same + // time. + std::set drop_seq_numbers; + WrapTest(0xFFFF - 2, 0xFFFFFFFF - 1000, drop_seq_numbers); +} + } // namespace diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.cc index 37ce2e7b212a..eb546e97640f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.cc @@ -17,7 +17,7 @@ namespace webrtc { Accelerate::ReturnCodes Accelerate::Process( const int16_t* input, size_t input_length, - AudioMultiVector* output, + AudioMultiVector* output, int16_t* length_change_samples) { // Input length must be (almost) 30 ms. static const int k15ms = 120; // 15 ms = 120 samples at 8 kHz sample rate. @@ -43,7 +43,7 @@ void Accelerate::SetParametersForPassiveSpeech(size_t /*len*/, Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch( const int16_t* input, size_t input_length, size_t peak_index, int16_t best_correlation, bool active_speech, - AudioMultiVector* output) const { + AudioMultiVector* output) const { // Check for strong correlation or passive speech. if ((best_correlation > kCorrelationThreshold) || !active_speech) { // Do accelerate operation by overlap add. @@ -56,7 +56,7 @@ Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch( // Copy first part; 0 to 15 ms. output->PushBackInterleaved(input, fs_mult_120 * num_channels_); // Copy the |peak_index| starting at 15 ms to |temp_vector|. 
- AudioMultiVector temp_vector(num_channels_); + AudioMultiVector temp_vector(num_channels_); temp_vector.PushBackInterleaved(&input[fs_mult_120 * num_channels_], peak_index * num_channels_); // Cross-fade |temp_vector| onto the end of |output|. @@ -78,4 +78,11 @@ Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch( } } +Accelerate* AccelerateFactory::Create( + int sample_rate_hz, + size_t num_channels, + const BackgroundNoise& background_noise) const { + return new Accelerate(sample_rate_hz, num_channels, background_noise); +} + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.h index 375a6f021b40..81f1abb53c08 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/accelerate.h @@ -43,7 +43,7 @@ class Accelerate : public TimeStretch { // the outcome of the operation as an enumerator value. ReturnCodes Process(const int16_t* input, size_t input_length, - AudioMultiVector* output, + AudioMultiVector* output, int16_t* length_change_samples); protected: @@ -58,11 +58,20 @@ class Accelerate : public TimeStretch { virtual ReturnCodes CheckCriteriaAndStretch( const int16_t* input, size_t input_length, size_t peak_index, int16_t best_correlation, bool active_speech, - AudioMultiVector* output) const OVERRIDE; + AudioMultiVector* output) const OVERRIDE; private: DISALLOW_COPY_AND_ASSIGN(Accelerate); }; +struct AccelerateFactory { + AccelerateFactory() {} + virtual ~AccelerateFactory() {} + + virtual Accelerate* Create(int sample_rate_hz, + size_t num_channels, + const BackgroundNoise& background_noise) const; +}; + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc index 77111ebf9a73..35422e3f9f5c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc @@ -74,6 +74,10 @@ bool AudioDecoder::CodecSupported(NetEqDecoder codec_type) { case kDecoderG722: case kDecoderG722_2ch: #endif +#ifdef WEBRTC_CODEC_CELT + case kDecoderCELT_32: + case kDecoderCELT_32_2ch: +#endif #ifdef WEBRTC_CODEC_OPUS case kDecoderOpus: case kDecoderOpus_2ch: @@ -131,6 +135,10 @@ int AudioDecoder::CodecSampleRateHz(NetEqDecoder codec_type) { #ifdef WEBRTC_CODEC_PCM16 case kDecoderPCM16Bswb32kHz: case kDecoderPCM16Bswb32kHz_2ch: +#endif +#ifdef WEBRTC_CODEC_CELT + case kDecoderCELT_32: + case kDecoderCELT_32_2ch: #endif case kDecoderCNGswb32kHz: { return 32000; @@ -206,6 +214,11 @@ AudioDecoder* AudioDecoder::CreateAudioDecoder(NetEqDecoder codec_type) { case kDecoderG722_2ch: return new AudioDecoderG722Stereo; #endif +#ifdef WEBRTC_CODEC_CELT + case kDecoderCELT_32: + case kDecoderCELT_32_2ch: + return new AudioDecoderCelt(codec_type); +#endif #ifdef WEBRTC_CODEC_OPUS case kDecoderOpus: case kDecoderOpus_2ch: diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc index 53119b1abc04..5296a1bd0f93 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc @@ -13,6 +13,9 @@ #include #include // memmove +#ifdef 
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc
index 77111ebf9a73..35422e3f9f5c 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder.cc
@@ -74,6 +74,10 @@ bool AudioDecoder::CodecSupported(NetEqDecoder codec_type) {
     case kDecoderG722:
     case kDecoderG722_2ch:
 #endif
+#ifdef WEBRTC_CODEC_CELT
+    case kDecoderCELT_32:
+    case kDecoderCELT_32_2ch:
+#endif
 #ifdef WEBRTC_CODEC_OPUS
     case kDecoderOpus:
     case kDecoderOpus_2ch:
@@ -131,6 +135,10 @@ int AudioDecoder::CodecSampleRateHz(NetEqDecoder codec_type) {
 #ifdef WEBRTC_CODEC_PCM16
     case kDecoderPCM16Bswb32kHz:
     case kDecoderPCM16Bswb32kHz_2ch:
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    case kDecoderCELT_32:
+    case kDecoderCELT_32_2ch:
 #endif
     case kDecoderCNGswb32kHz: {
       return 32000;
@@ -206,6 +214,11 @@ AudioDecoder* AudioDecoder::CreateAudioDecoder(NetEqDecoder codec_type) {
     case kDecoderG722_2ch:
       return new AudioDecoderG722Stereo;
 #endif
+#ifdef WEBRTC_CODEC_CELT
+    case kDecoderCELT_32:
+    case kDecoderCELT_32_2ch:
+      return new AudioDecoderCelt(codec_type);
+#endif
 #ifdef WEBRTC_CODEC_OPUS
     case kDecoderOpus:
     case kDecoderOpus_2ch:
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc
index 53119b1abc04..5296a1bd0f93 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc
@@ -13,6 +13,9 @@
 #include <assert.h>
 #include <string.h>  // memmove
 
+#ifdef WEBRTC_CODEC_CELT
+#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
+#endif
 #include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
 #include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
 #ifdef WEBRTC_CODEC_G722
@@ -377,6 +380,55 @@ void AudioDecoderG722Stereo::SplitStereoPacket(const uint8_t* encoded,
 }
 #endif
 
+// CELT
+#ifdef WEBRTC_CODEC_CELT
+AudioDecoderCelt::AudioDecoderCelt(enum NetEqDecoder type)
+    : AudioDecoder(type) {
+  assert(type == kDecoderCELT_32 || type == kDecoderCELT_32_2ch);
+  if (type == kDecoderCELT_32) {
+    channels_ = 1;
+  } else {
+    channels_ = 2;
+  }
+  WebRtcCelt_CreateDec(reinterpret_cast<CELT_decinst_t**>(&state_),
+                       static_cast<int>(channels_));
+}
+
+AudioDecoderCelt::~AudioDecoderCelt() {
+  WebRtcCelt_FreeDec(static_cast<CELT_decinst_t*>(state_));
+}
+
+int AudioDecoderCelt::Decode(const uint8_t* encoded, size_t encoded_len,
+                             int16_t* decoded, SpeechType* speech_type) {
+  int16_t temp_type = 1;  // Default to speech.
+  int ret = WebRtcCelt_DecodeUniversal(static_cast<CELT_decinst_t*>(state_),
+                                       encoded, static_cast<int>(encoded_len),
+                                       decoded, &temp_type);
+  *speech_type = ConvertSpeechType(temp_type);
+  if (ret < 0) {
+    return -1;
+  }
+  // Return the total number of samples.
+  return ret * static_cast<int>(channels_);
+}
+
+int AudioDecoderCelt::Init() {
+  return WebRtcCelt_DecoderInit(static_cast<CELT_decinst_t*>(state_));
+}
+
+bool AudioDecoderCelt::HasDecodePlc() const { return true; }
+
+int AudioDecoderCelt::DecodePlc(int num_frames, int16_t* decoded) {
+  int ret = WebRtcCelt_DecodePlc(static_cast<CELT_decinst_t*>(state_),
+                                 decoded, num_frames);
+  if (ret < 0) {
+    return -1;
+  }
+  // Return the total number of samples.
+  return ret * static_cast<int>(channels_);
+}
+#endif
+
 // Opus
 #ifdef WEBRTC_CODEC_OPUS
 AudioDecoderOpus::AudioDecoderOpus(enum NetEqDecoder type)
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h
index b74aed8974b9..aa35db78082f 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h
@@ -212,6 +212,23 @@ class AudioDecoderG722Stereo : public AudioDecoderG722 {
 };
 #endif
 
+#ifdef WEBRTC_CODEC_CELT
+class AudioDecoderCelt : public AudioDecoder {
+ public:
+  explicit AudioDecoderCelt(enum NetEqDecoder type);
+  virtual ~AudioDecoderCelt();
+
+  virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+                     int16_t* decoded, SpeechType* speech_type);
+  virtual int Init();
+  virtual bool HasDecodePlc() const;
+  virtual int DecodePlc(int num_frames, int16_t* decoded);
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(AudioDecoderCelt);
+};
+#endif
+
 #ifdef WEBRTC_CODEC_OPUS
 class AudioDecoderOpus : public AudioDecoder {
  public:
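Note on the decoder just added: Decode() and DecodePlc() report sample counts summed over all channels, which is exactly what the unit-test change below (frame_size_ * channels_) asserts. A hedged usage sketch; payload, payload_len and out_buffer are placeholders:

#ifdef WEBRTC_CODEC_CELT
AudioDecoder* decoder = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32_2ch);
decoder->Init();
AudioDecoder::SpeechType speech_type;
// For a 640-sample (20 ms at 32 kHz) stereo frame, a successful call returns
// 1280: samples per channel times the number of channels; -1 signals error.
int total_samples = decoder->Decode(payload, payload_len,
                                    out_buffer, &speech_type);
delete decoder;
#endif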
#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h" @@ -116,14 +119,16 @@ class AudioDecoderTest : public ::testing::Test { encoded_bytes_ += enc_len; processed_samples += frame_size_; } - // This test fails on Win x64, see issue webrtc:1459 -#if !(defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)) - EXPECT_EQ(expected_bytes, encoded_bytes_); + // For some codecs it doesn't make sense to check expected number of bytes, + // since the number can vary for different platforms. Opus and iSAC are + // such codecs. In this case expected_bytes is set to 0. + if (expected_bytes) { + EXPECT_EQ(expected_bytes, encoded_bytes_); + } CompareInputOutput(processed_samples, tolerance, delay); if (channels_ == 2) CompareTwoChannels(processed_samples, channel_diff_tolerance); EXPECT_LE(MseInputOutput(processed_samples, delay), mse); -#endif } // The absolute difference between the input and output (the first channel) is @@ -143,11 +148,12 @@ class AudioDecoderTest : public ::testing::Test { // The absolute difference between the two channels in a stereo is compared vs // |tolerance|. - virtual void CompareTwoChannels(size_t num_samples, int tolerance) const { - assert(num_samples <= data_length_); - for (unsigned int n = 0; n < num_samples; ++n) - ASSERT_NEAR(decoded_[channels_ * n], decoded_[channels_ * n + 1], - tolerance) << "Stereo samples differ."; + virtual void CompareTwoChannels(size_t samples_per_channel, + int tolerance) const { + assert(samples_per_channel <= data_length_); + for (unsigned int n = 0; n < samples_per_channel; ++n) + ASSERT_NEAR(decoded_[channels_ * n], decoded_[channels_ * n + 1], + tolerance) << "Stereo samples differ."; } // Calculates mean-squared error between input and output (the first channel). @@ -199,12 +205,12 @@ class AudioDecoderTest : public ::testing::Test { EXPECT_EQ(0, decoder_->Init()); size_t dec_len = decoder_->Decode(encoded_, enc_len, decoded_, &speech_type); - EXPECT_EQ(frame_size_, dec_len); + EXPECT_EQ(frame_size_ * channels_, dec_len); // Call DecodePlc and verify that we get one frame of data. // (Overwrite the output from the above Decode call, but that does not // matter.) 
     dec_len = decoder_->DecodePlc(1, decoded_);
-    EXPECT_EQ(frame_size_, dec_len);
+    EXPECT_EQ(frame_size_ * channels_, dec_len);
   }
 
   std::string input_file_;
@@ -527,6 +533,77 @@ class AudioDecoderG722StereoTest : public AudioDecoderG722Test {
   }
 };
 
+#ifdef WEBRTC_CODEC_CELT
+class AudioDecoderCeltTest : public AudioDecoderTest {
+ protected:
+  static const int kEncodingRateBitsPerSecond = 64000;
+  AudioDecoderCeltTest() : AudioDecoderTest(), encoder_(NULL) {
+    frame_size_ = 640;
+    data_length_ = 10 * frame_size_;
+    decoder_ = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32);
+    assert(decoder_);
+    WebRtcCelt_CreateEnc(&encoder_, static_cast<int>(channels_));
+  }
+
+  ~AudioDecoderCeltTest() {
+    WebRtcCelt_FreeEnc(encoder_);
+  }
+
+  virtual void InitEncoder() {
+    assert(encoder_);
+    ASSERT_EQ(0, WebRtcCelt_EncoderInit(
+        encoder_, static_cast<int>(channels_), kEncodingRateBitsPerSecond));
+  }
+
+  virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+                          uint8_t* output) {
+    assert(encoder_);
+    return WebRtcCelt_Encode(encoder_, input, output);
+  }
+
+  CELT_encinst_t* encoder_;
+};
+
+class AudioDecoderCeltStereoTest : public AudioDecoderTest {
+ protected:
+  static const int kEncodingRateBitsPerSecond = 64000;
+  AudioDecoderCeltStereoTest() : AudioDecoderTest(), encoder_(NULL) {
+    channels_ = 2;
+    frame_size_ = 640;
+    data_length_ = 10 * frame_size_;
+    decoder_ = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32_2ch);
+    assert(decoder_);
+    stereo_input_ = new int16_t[frame_size_ * channels_];
+    WebRtcCelt_CreateEnc(&encoder_, static_cast<int>(channels_));
+  }
+
+  ~AudioDecoderCeltStereoTest() {
+    delete [] stereo_input_;
+    WebRtcCelt_FreeEnc(encoder_);
+  }
+
+  virtual void InitEncoder() {
+    assert(encoder_);
+    ASSERT_EQ(0, WebRtcCelt_EncoderInit(
+        encoder_, static_cast<int>(channels_), kEncodingRateBitsPerSecond));
+  }
+
+  virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+                          uint8_t* output) {
+    assert(encoder_);
+    assert(stereo_input_);
+    for (size_t n = 0; n < frame_size_; ++n) {
+      stereo_input_[n * 2] = stereo_input_[n * 2 + 1] = input[n];
+    }
+    return WebRtcCelt_Encode(encoder_, stereo_input_, output);
+  }
+
+  int16_t* stereo_input_;
+  CELT_encinst_t* encoder_;
+};
+
+#endif
+
 class AudioDecoderOpusTest : public AudioDecoderTest {
  protected:
   AudioDecoderOpusTest() : AudioDecoderTest() {
@@ -658,7 +735,7 @@ TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) {
   double mse = 434951.0;
   int delay = 48;  // Delay from input to output.
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
-  EncodeDecodeTest(883, tolerance, mse, delay);
+  EncodeDecodeTest(0, tolerance, mse, delay);
   ReInitTest();
   EXPECT_TRUE(decoder_->HasDecodePlc());
   DecodePlcTest();
@@ -669,7 +746,7 @@ TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
   double mse = 8.18e6;
   int delay = 160;  // Delay from input to output.
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
-  EncodeDecodeTest(853, tolerance, mse, delay);
+  EncodeDecodeTest(0, tolerance, mse, delay);
   ReInitTest();
   EXPECT_TRUE(decoder_->HasDecodePlc());
   DecodePlcTest();
@@ -680,7 +757,7 @@ TEST_F(AudioDecoderIsacFbTest, EncodeDecode) {
   double mse = 8.18e6;
   int delay = 160;  // Delay from input to output.
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
-  EncodeDecodeTest(853, tolerance, mse, delay);
+  EncodeDecodeTest(0, tolerance, mse, delay);
   ReInitTest();
   EXPECT_TRUE(decoder_->HasDecodePlc());
   DecodePlcTest();
@@ -726,7 +803,7 @@ TEST_F(AudioDecoderOpusTest, EncodeDecode) {
   double mse = 238630.0;
   int delay = 22;  // Delay from input to output.
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus));
-  EncodeDecodeTest(731, tolerance, mse, delay);
+  EncodeDecodeTest(0, tolerance, mse, delay);
   ReInitTest();
   EXPECT_FALSE(decoder_->HasDecodePlc());
 }
@@ -737,11 +814,43 @@ TEST_F(AudioDecoderOpusStereoTest, EncodeDecode) {
   double mse = 238630.0;
   int delay = 22;  // Delay from input to output.
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus_2ch));
-  EncodeDecodeTest(1383, tolerance, mse, delay, channel_diff_tolerance);
+  EncodeDecodeTest(0, tolerance, mse, delay, channel_diff_tolerance);
   ReInitTest();
   EXPECT_FALSE(decoder_->HasDecodePlc());
 }
 
+#ifdef WEBRTC_CODEC_CELT
+// In the two following CELT tests, the low amplitude of the test signal
+// allows us to have such low error thresholds, i.e. |tolerance| and |mse|.
+// Furthermore, in general, stereo signals with identical channels do not
+// result in identical encoded channels.
+TEST_F(AudioDecoderCeltTest, EncodeDecode) {
+  int tolerance = 20;
+  double mse = 17.0;
+  int delay = 80;  // Delay from input to output in samples.
+  EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32));
+  EncodeDecodeTest(1600, tolerance, mse, delay);
+  ReInitTest();
+  EXPECT_TRUE(decoder_->HasDecodePlc());
+  DecodePlcTest();
+}
+
+TEST_F(AudioDecoderCeltStereoTest, EncodeDecode) {
+  int tolerance = 20;
+  // Even when both input channels are identical, CELT does not necessarily
+  // decode identical channels. However, for this input it does.
+  int channel_diff_tolerance = 0;
+  double mse = 20.0;
+  // Delay from input to output in samples, accounting for stereo.
+  int delay = 160;
+  EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
+  EncodeDecodeTest(1600, tolerance, mse, delay, channel_diff_tolerance);
+  ReInitTest();
+  EXPECT_TRUE(decoder_->HasDecodePlc());
+  DecodePlcTest();
+}
+#endif
+
 TEST(AudioDecoder, CodecSampleRateHz) {
   EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMu));
   EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMa));
@@ -772,8 +881,13 @@ TEST(AudioDecoder, CodecSampleRateHz) {
   EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderArbitrary));
   EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderOpus));
   EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderOpus_2ch));
+#ifdef WEBRTC_CODEC_CELT
+  EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32));
+  EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32_2ch));
+#else
   EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32));
   EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32_2ch));
+#endif
 }
 
 TEST(AudioDecoder, CodecSupported) {
@@ -805,8 +919,13 @@ TEST(AudioDecoder, CodecSupported) {
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderArbitrary));
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus));
   EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus_2ch));
+#ifdef WEBRTC_CODEC_CELT
+  EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32));
+  EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
+#else
   EXPECT_FALSE(AudioDecoder::CodecSupported(kDecoderCELT_32));
   EXPECT_FALSE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
+#endif
 }
 
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate
index 63177801a772..bb57e74b34fe 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate
@@ -21,7 +21,6 @@
       'variables': {
         'command': [
           '../../../../testing/test_env.py',
-          '../../../../tools/swarm_client/googletest/run_test_cases.py',
           '<(PRODUCT_DIR)/audio_decoder_unittests<(EXECUTABLE_SUFFIX)',
         ],
         'isolate_dependency_touched': [
@@ -30,11 +29,11 @@
         'isolate_dependency_tracked': [
           '../../../../resources/audio_coding/testfile32kHz.pcm',
           '../../../../testing/test_env.py',
-          '../../../../tools/swarm_client/run_isolated.py',
-          '../../../../tools/swarm_client/googletest/run_test_cases.py',
-          '../../../../tools/swarm_client/third_party/upload.py',
           '<(PRODUCT_DIR)/audio_decoder_unittests<(EXECUTABLE_SUFFIX)',
         ],
+        'isolate_dependency_untracked': [
+          '../../../../tools/swarming_client/',
+        ],
       },
     }],
   ],
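For orientation before the AudioMultiVector changes below: the class keeps one AudioVector per channel and converts to and from channel-interleaved buffers. A small sketch of the layout, using the patched (non-template) API with invented sample values:

// Two channels; the interleaved input is L0 R0 L1 R1 L2 R2.
const int16_t interleaved[] = {10, 20, 11, 21, 12, 22};
AudioMultiVector mv(2);
mv.PushBackInterleaved(interleaved, 6);  // mv[0] == {10, 11, 12}; mv[1] == {20, 21, 22}.
int16_t out[6];
mv.ReadInterleaved(3, out);  // Re-interleaves: out == {10, 20, 11, 21, 12, 22}.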
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc
index 7eb214278248..b49f8b0e8ae9 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc
@@ -18,130 +18,118 @@
 
 namespace webrtc {
 
-template <typename T>
-AudioMultiVector<T>::AudioMultiVector(size_t N) {
+AudioMultiVector::AudioMultiVector(size_t N) {
   assert(N > 0);
   if (N < 1) N = 1;
   for (size_t n = 0; n < N; ++n) {
-    channels_.push_back(new AudioVector<T>);
+    channels_.push_back(new AudioVector);
   }
+  num_channels_ = N;
 }
 
-template <typename T>
-AudioMultiVector<T>::AudioMultiVector(size_t N, size_t initial_size) {
+AudioMultiVector::AudioMultiVector(size_t N, size_t initial_size) {
   assert(N > 0);
   if (N < 1) N = 1;
   for (size_t n = 0; n < N; ++n) {
-    channels_.push_back(new AudioVector<T>(initial_size));
+    channels_.push_back(new AudioVector(initial_size));
   }
+  num_channels_ = N;
 }
 
-template <typename T>
-AudioMultiVector<T>::~AudioMultiVector() {
-  typename std::vector<AudioVector<T>*>::iterator it = channels_.begin();
+AudioMultiVector::~AudioMultiVector() {
+  std::vector<AudioVector*>::iterator it = channels_.begin();
   while (it != channels_.end()) {
     delete (*it);
     ++it;
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::Clear() {
-  for (size_t i = 0; i < Channels(); ++i) {
+void AudioMultiVector::Clear() {
+  for (size_t i = 0; i < num_channels_; ++i) {
    channels_[i]->Clear();
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::Zeros(size_t length) {
-  for (size_t i = 0; i < Channels(); ++i) {
+void AudioMultiVector::Zeros(size_t length) {
+  for (size_t i = 0; i < num_channels_; ++i) {
     channels_[i]->Clear();
     channels_[i]->Extend(length);
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::CopyFrom(AudioMultiVector<T>* copy_to) const {
+void AudioMultiVector::CopyFrom(AudioMultiVector* copy_to) const {
   if (copy_to) {
-    for (size_t i = 0; i < Channels(); ++i) {
+    for (size_t i = 0; i < num_channels_; ++i) {
      channels_[i]->CopyFrom(&(*copy_to)[i]);
     }
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::PushBackInterleaved(const T* append_this,
-                                              size_t length) {
-  assert(length % Channels() == 0);
-  if (Channels() == 1) {
+void AudioMultiVector::PushBackInterleaved(const int16_t* append_this,
+                                           size_t length) {
+  assert(length % num_channels_ == 0);
+  if (num_channels_ == 1) {
     // Special case to avoid extra allocation and data shuffling.
     channels_[0]->PushBack(append_this, length);
     return;
   }
-  size_t length_per_channel = length / Channels();
-  T* temp_array = new T[length_per_channel];  // Intermediate storage.
-  for (size_t channel = 0; channel < Channels(); ++channel) {
+  size_t length_per_channel = length / num_channels_;
+  int16_t* temp_array = new int16_t[length_per_channel];  // Temporary storage.
+  for (size_t channel = 0; channel < num_channels_; ++channel) {
     // Copy elements to |temp_array|.
     // Set |source_ptr| to first element of this channel.
-    const T* source_ptr = &append_this[channel];
+    const int16_t* source_ptr = &append_this[channel];
     for (size_t i = 0; i < length_per_channel; ++i) {
       temp_array[i] = *source_ptr;
-      source_ptr += Channels();  // Jump to next element of this channel.
+      source_ptr += num_channels_;  // Jump to next element of this channel.
     }
     channels_[channel]->PushBack(temp_array, length_per_channel);
   }
   delete [] temp_array;
 }
 
-template <typename T>
-void AudioMultiVector<T>::PushBack(const AudioMultiVector<T>& append_this) {
-  assert(Channels() == append_this.Channels());
-  if (Channels() == append_this.Channels()) {
-    for (size_t i = 0; i < Channels(); ++i) {
+void AudioMultiVector::PushBack(const AudioMultiVector& append_this) {
+  assert(num_channels_ == append_this.num_channels_);
+  if (num_channels_ == append_this.num_channels_) {
+    for (size_t i = 0; i < num_channels_; ++i) {
       channels_[i]->PushBack(append_this[i]);
     }
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::PushBackFromIndex(
-    const AudioMultiVector<T>& append_this,
-    size_t index) {
+void AudioMultiVector::PushBackFromIndex(const AudioMultiVector& append_this,
+                                         size_t index) {
   assert(index < append_this.Size());
   index = std::min(index, append_this.Size() - 1);
   size_t length = append_this.Size() - index;
-  assert(Channels() == append_this.Channels());
-  if (Channels() == append_this.Channels()) {
-    for (size_t i = 0; i < Channels(); ++i) {
+  assert(num_channels_ == append_this.num_channels_);
+  if (num_channels_ == append_this.num_channels_) {
+    for (size_t i = 0; i < num_channels_; ++i) {
       channels_[i]->PushBack(&append_this[i][index], length);
     }
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::PopFront(size_t length) {
-  for (size_t i = 0; i < Channels(); ++i) {
+void AudioMultiVector::PopFront(size_t length) {
+  for (size_t i = 0; i < num_channels_; ++i) {
     channels_[i]->PopFront(length);
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::PopBack(size_t length) {
-  for (size_t i = 0; i < Channels(); ++i) {
+void AudioMultiVector::PopBack(size_t length) {
+  for (size_t i = 0; i < num_channels_; ++i) {
     channels_[i]->PopBack(length);
   }
 }
 
-template <typename T>
-size_t AudioMultiVector<T>::ReadInterleaved(size_t length,
-                                            T* destination) const {
+size_t AudioMultiVector::ReadInterleaved(size_t length,
+                                         int16_t* destination) const {
   return ReadInterleavedFromIndex(0, length, destination);
 }
 
-template <typename T>
-size_t AudioMultiVector<T>::ReadInterleavedFromIndex(size_t start_index,
-                                                     size_t length,
-                                                     T* destination) const {
+size_t AudioMultiVector::ReadInterleavedFromIndex(size_t start_index,
+                                                  size_t length,
+                                                  int16_t* destination) const {
   if (!destination) {
     return 0;
   }
@@ -151,8 +139,13 @@ size_t AudioMultiVector<T>::ReadInterleavedFromIndex(size_t start_index,
   if (length + start_index > Size()) {
     length = Size() - start_index;
   }
+  if (num_channels_ == 1) {
+    // Special case to avoid the nested for loop below.
+    memcpy(destination, &(*this)[0][start_index], length * sizeof(int16_t));
+    return length;
+  }
   for (size_t i = 0; i < length; ++i) {
-    for (size_t channel = 0; channel < Channels(); ++channel) {
+    for (size_t channel = 0; channel < num_channels_; ++channel) {
       destination[index] = (*this)[channel][i + start_index];
       ++index;
     }
@@ -160,74 +153,61 @@ size_t AudioMultiVector<T>::ReadInterleavedFromIndex(size_t start_index,
   return index;
 }
 
-template <typename T>
-size_t AudioMultiVector<T>::ReadInterleavedFromEnd(size_t length,
-                                                   T* destination) const {
+size_t AudioMultiVector::ReadInterleavedFromEnd(size_t length,
+                                                int16_t* destination) const {
   length = std::min(length, Size());  // Cannot read more than Size() elements.
   return ReadInterleavedFromIndex(Size() - length, length, destination);
 }
 
-template <typename T>
-void AudioMultiVector<T>::OverwriteAt(const AudioMultiVector<T>& insert_this,
-                                      size_t length,
-                                      size_t position) {
-  assert(Channels() == insert_this.Channels());
+void AudioMultiVector::OverwriteAt(const AudioMultiVector& insert_this,
+                                   size_t length,
+                                   size_t position) {
+  assert(num_channels_ == insert_this.num_channels_);
   // Cap |length| at the length of |insert_this|.
   assert(length <= insert_this.Size());
   length = std::min(length, insert_this.Size());
-  if (Channels() == insert_this.Channels()) {
-    for (size_t i = 0; i < Channels(); ++i) {
+  if (num_channels_ == insert_this.num_channels_) {
+    for (size_t i = 0; i < num_channels_; ++i) {
      channels_[i]->OverwriteAt(&insert_this[i][0], length, position);
     }
   }
 }
 
-template <typename T>
-void AudioMultiVector<T>::CrossFade(const AudioMultiVector<T>& append_this,
-                                    size_t fade_length) {
-  assert(Channels() == append_this.Channels());
-  if (Channels() == append_this.Channels()) {
-    for (size_t i = 0; i < Channels(); ++i) {
+void AudioMultiVector::CrossFade(const AudioMultiVector& append_this,
+                                 size_t fade_length) {
+  assert(num_channels_ == append_this.num_channels_);
+  if (num_channels_ == append_this.num_channels_) {
+    for (size_t i = 0; i < num_channels_; ++i) {
       channels_[i]->CrossFade(append_this[i], fade_length);
     }
   }
 }
 
-template <typename T>
-size_t AudioMultiVector<T>::Size() const {
+size_t AudioMultiVector::Size() const {
   assert(channels_[0]);
   return channels_[0]->Size();
 }
 
-template <typename T>
-void AudioMultiVector<T>::AssertSize(size_t required_size) {
+void AudioMultiVector::AssertSize(size_t required_size) {
   if (Size() < required_size) {
     size_t extend_length = required_size - Size();
-    for (size_t channel = 0; channel < Channels(); ++channel) {
+    for (size_t channel = 0; channel < num_channels_; ++channel) {
      channels_[channel]->Extend(extend_length);
    }
   }
 }
 
-template <typename T>
-bool AudioMultiVector<T>::Empty() const {
+bool AudioMultiVector::Empty() const {
   assert(channels_[0]);
   return channels_[0]->Empty();
 }
 
-template <typename T>
-const AudioVector<T>& AudioMultiVector<T>::operator[](size_t index) const {
+const AudioVector& AudioMultiVector::operator[](size_t index) const {
   return *(channels_[index]);
 }
 
-template <typename T>
-AudioVector<T>& AudioMultiVector<T>::operator[](size_t index) {
+AudioVector& AudioMultiVector::operator[](size_t index) {
   return *(channels_[index]);
 }
 
-// Instantiate the template for a few types.
-template class AudioMultiVector<int16_t>;
-template class AudioMultiVector<int32_t>;
-template class AudioMultiVector<double>;
-
 }  // namespace webrtc
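A quick illustration of the read clamping implemented above, using the patched mono fast path (values invented):

AudioMultiVector mv(1);
const int16_t samples[5] = {1, 2, 3, 4, 5};
mv.PushBackInterleaved(samples, 5);
int16_t out[8];
// A request for 8 samples starting at index 3 is capped at Size() - 3 == 2;
// with one channel the copy is a single memcpy and the call returns 2.
size_t n = mv.ReadInterleavedFromIndex(3, 8, out);  // out[0..1] == {4, 5}.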
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h
index 7167c670cd3d..2d0a74949128 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h
@@ -17,10 +17,10 @@
 
 #include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
 #include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/typedefs.h"
 
 namespace webrtc {
 
-template <typename T>
 class AudioMultiVector {
  public:
   // Creates an empty AudioMultiVector with |N| audio channels. |N| must be
@@ -43,23 +43,23 @@ class AudioMultiVector {
   // are deleted. After the operation is done, |copy_to| will be an exact
   // replica of this object. The source and the destination must have the same
   // number of channels.
-  virtual void CopyFrom(AudioMultiVector<T>* copy_to) const;
+  virtual void CopyFrom(AudioMultiVector* copy_to) const;
 
   // Appends the contents of array |append_this| to the end of this
   // object. The array is assumed to be channel-interleaved. |length| must be
   // an even multiple of this object's number of channels.
   // The length of this object is increased with the |length| divided by the
   // number of channels.
-  virtual void PushBackInterleaved(const T* append_this, size_t length);
+  virtual void PushBackInterleaved(const int16_t* append_this, size_t length);
 
   // Appends the contents of AudioMultiVector |append_this| to this object. The
   // length of this object is increased with the length of |append_this|.
-  virtual void PushBack(const AudioMultiVector<T>& append_this);
+  virtual void PushBack(const AudioMultiVector& append_this);
 
   // Appends the contents of AudioMultiVector |append_this| to this object,
   // taken from |index| up until the end of |append_this|. The length of this
   // object is increased.
-  virtual void PushBackFromIndex(const AudioMultiVector<T>& append_this,
+  virtual void PushBackFromIndex(const AudioMultiVector& append_this,
                                  size_t index);
 
   // Removes |length| elements from the beginning of this object, from each
@@ -75,18 +75,18 @@ class AudioMultiVector {
   // returned, i.e., |length| * number of channels. If the AudioMultiVector
   // contains less than |length| samples per channel, this is reflected in the
   // return value.
-  virtual size_t ReadInterleaved(size_t length, T* destination) const;
+  virtual size_t ReadInterleaved(size_t length, int16_t* destination) const;
 
   // Like ReadInterleaved() above, but reads from |start_index| instead of from
   // the beginning.
   virtual size_t ReadInterleavedFromIndex(size_t start_index,
                                           size_t length,
-                                          T* destination) const;
+                                          int16_t* destination) const;
 
   // Like ReadInterleaved() above, but reads from the end instead of from
   // the beginning.
   virtual size_t ReadInterleavedFromEnd(size_t length,
-                                        T* destination) const;
+                                        int16_t* destination) const;
 
   // Overwrites each channel in this AudioMultiVector with values taken from
   // |insert_this|. The values are taken from the beginning of |insert_this| and
@@ -95,18 +95,18 @@ class AudioMultiVector {
   // extends beyond the end of the current AudioVector, the vector is extended
   // to accommodate the new data. |length| is limited to the length of
   // |insert_this|.
-  virtual void OverwriteAt(const AudioMultiVector<T>& insert_this,
+  virtual void OverwriteAt(const AudioMultiVector& insert_this,
                            size_t length,
                            size_t position);
 
   // Appends |append_this| to the end of the current vector. Lets the two
   // vectors overlap by |fade_length| samples (per channel), and cross-fade
   // linearly in this region.
-  virtual void CrossFade(const AudioMultiVector<T>& append_this,
+  virtual void CrossFade(const AudioMultiVector& append_this,
                          size_t fade_length);
 
   // Returns the number of channels.
-  virtual size_t Channels() const { return channels_.size(); }
+  virtual size_t Channels() const { return num_channels_; }
 
   // Returns the number of elements per channel in this AudioMultiVector.
   virtual size_t Size() const;
@@ -119,11 +119,12 @@ class AudioMultiVector {
 
   // Accesses and modifies a channel (i.e., an AudioVector object) of this
   // AudioMultiVector.
-  const AudioVector<T>& operator[](size_t index) const;
-  AudioVector<T>& operator[](size_t index);
+  const AudioVector& operator[](size_t index) const;
+  AudioVector& operator[](size_t index);
 
  protected:
-  std::vector<AudioVector<T>*> channels_;
+  std::vector<AudioVector*> channels_;
+  size_t num_channels_;
 
  private:
   DISALLOW_COPY_AND_ASSIGN(AudioMultiVector);
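The fixture below derives from ::testing::TestWithParam<size_t>, so every TEST_P runs once per configured channel count. For reference, such a suite is instantiated roughly like this (suite name and channel list are illustrative; the real instantiation lies outside the quoted hunks):

INSTANTIATE_TEST_CASE_P(TestNumChannels,
                        AudioMultiVectorTest,
                        ::testing::Values(static_cast<size_t>(1),
                                          static_cast<size_t>(2),
                                          static_cast<size_t>(5)));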
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc
index d981ccb7cd61..be05a8260f9e 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc
@@ -30,12 +30,10 @@ namespace webrtc {
 
 class AudioMultiVectorTest : public ::testing::TestWithParam<size_t> {
  protected:
-  typedef int16_t T;  // Use this value type for all tests.
-
   AudioMultiVectorTest()
       : num_channels_(GetParam()),  // Get the test parameter.
         interleaved_length_(num_channels_ * array_length()) {
-    array_interleaved_ = new T[num_channels_ * array_length()];
+    array_interleaved_ = new int16_t[num_channels_ * array_length()];
   }
 
   ~AudioMultiVectorTest() {
@@ -45,9 +43,9 @@ class AudioMultiVectorTest : public ::testing::TestWithParam<size_t> {
   virtual void SetUp() {
     // Populate test arrays.
     for (size_t i = 0; i < array_length(); ++i) {
-      array_[i] = static_cast<T>(i);
+      array_[i] = static_cast<int16_t>(i);
     }
-    T* ptr = array_interleaved_;
+    int16_t* ptr = array_interleaved_;
     // Write 100, 101, 102, ... for first channel.
     // Write 200, 201, 202, ... for second channel.
     // And so on.
@@ -65,20 +63,20 @@ class AudioMultiVectorTest : public ::testing::TestWithParam<size_t> {
   const size_t num_channels_;
   size_t interleaved_length_;
-  T array_[10];
-  T* array_interleaved_;
+  int16_t array_[10];
+  int16_t* array_interleaved_;
 };
 
 // Create and destroy AudioMultiVector objects, both empty and with a predefined
 // length.
 TEST_P(AudioMultiVectorTest, CreateAndDestroy) {
-  AudioMultiVector<T> vec1(num_channels_);
+  AudioMultiVector vec1(num_channels_);
   EXPECT_TRUE(vec1.Empty());
   EXPECT_EQ(num_channels_, vec1.Channels());
   EXPECT_EQ(0u, vec1.Size());
 
   size_t initial_size = 17;
-  AudioMultiVector<T> vec2(num_channels_, initial_size);
+  AudioMultiVector vec2(num_channels_, initial_size);
   EXPECT_FALSE(vec2.Empty());
   EXPECT_EQ(num_channels_, vec2.Channels());
   EXPECT_EQ(initial_size, vec2.Size());
@@ -86,13 +84,13 @@ TEST_P(AudioMultiVectorTest, CreateAndDestroy) {
 
 // Test the subscript operator [] for getting and setting.
 TEST_P(AudioMultiVectorTest, SubscriptOperator) {
-  AudioMultiVector<T> vec(num_channels_, array_length());
+  AudioMultiVector vec(num_channels_, array_length());
   for (size_t channel = 0; channel < num_channels_; ++channel) {
     for (size_t i = 0; i < array_length(); ++i) {
-      vec[channel][i] = static_cast<T>(i);
+      vec[channel][i] = static_cast<int16_t>(i);
       // Make sure to use the const version.
-      const AudioVector<T>& audio_vec = vec[channel];
-      EXPECT_EQ(static_cast<T>(i), audio_vec[i]);
+      const AudioVector& audio_vec = vec[channel];
+      EXPECT_EQ(static_cast<int16_t>(i), audio_vec[i]);
     }
   }
 }
@@ -100,9 +98,9 @@ TEST_P(AudioMultiVectorTest, SubscriptOperator) {
 
 // Test the PushBackInterleaved method and the CopyFrom method. The Clear
 // method is also invoked.
 TEST_P(AudioMultiVectorTest, PushBackInterleavedAndCopy) {
-  AudioMultiVector<T> vec(num_channels_);
+  AudioMultiVector vec(num_channels_);
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
-  AudioMultiVector<T> vec_copy(num_channels_);
+  AudioMultiVector vec_copy(num_channels_);
   vec.CopyFrom(&vec_copy);  // Copy from |vec| to |vec_copy|.
   ASSERT_EQ(num_channels_, vec.Channels());
   ASSERT_EQ(array_length(), vec.Size());
@@ -110,7 +108,7 @@ TEST_P(AudioMultiVectorTest, PushBackInterleavedAndCopy) {
   ASSERT_EQ(array_length(), vec_copy.Size());
   for (size_t channel = 0; channel < vec.Channels(); ++channel) {
     for (size_t i = 0; i < array_length(); ++i) {
-      EXPECT_EQ(static_cast<T>((channel + 1) * 100 + i), vec[channel][i]);
+      EXPECT_EQ(static_cast<int16_t>((channel + 1) * 100 + i), vec[channel][i]);
       EXPECT_EQ(vec[channel][i], vec_copy[channel][i]);
     }
   }
@@ -126,24 +124,25 @@ TEST_P(AudioMultiVectorTest, PushBackInterleavedAndCopy) {
 
 // Try to copy to a NULL pointer. Nothing should happen.
 TEST_P(AudioMultiVectorTest, CopyToNull) {
-  AudioMultiVector<T> vec(num_channels_);
-  AudioMultiVector<T>* vec_copy = NULL;
+  AudioMultiVector vec(num_channels_);
+  AudioMultiVector* vec_copy = NULL;
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
   vec.CopyFrom(vec_copy);
 }
 
 // Test the PushBack method with another AudioMultiVector as input argument.
 TEST_P(AudioMultiVectorTest, PushBackVector) {
-  AudioMultiVector<T> vec1(num_channels_, array_length());
-  AudioMultiVector<T> vec2(num_channels_, array_length());
+  AudioMultiVector vec1(num_channels_, array_length());
+  AudioMultiVector vec2(num_channels_, array_length());
   // Set the first vector to [0, 1, ..., array_length() - 1] +
   //   100 * channel_number.
   // Set the second vector to [array_length(), array_length() + 1, ...,
   //   2 * array_length() - 1] + 100 * channel_number.
   for (size_t channel = 0; channel < num_channels_; ++channel) {
     for (size_t i = 0; i < array_length(); ++i) {
-      vec1[channel][i] = static_cast<T>(i + 100 * channel);
-      vec2[channel][i] = static_cast<T>(i + 100 * channel + array_length());
+      vec1[channel][i] = static_cast<int16_t>(i + 100 * channel);
+      vec2[channel][i] =
+          static_cast<int16_t>(i + 100 * channel + array_length());
     }
   }
   // Append vec2 to the back of vec1.
@@ -151,16 +150,16 @@ TEST_P(AudioMultiVectorTest, PushBackVector) {
   ASSERT_EQ(2u * array_length(), vec1.Size());
   for (size_t channel = 0; channel < num_channels_; ++channel) {
     for (size_t i = 0; i < 2 * array_length(); ++i) {
-      EXPECT_EQ(static_cast<T>(i + 100 * channel), vec1[channel][i]);
+      EXPECT_EQ(static_cast<int16_t>(i + 100 * channel), vec1[channel][i]);
     }
   }
 }
 
 // Test the PushBackFromIndex method.
 TEST_P(AudioMultiVectorTest, PushBackFromIndex) {
-  AudioMultiVector<T> vec1(num_channels_);
+  AudioMultiVector vec1(num_channels_);
   vec1.PushBackInterleaved(array_interleaved_, interleaved_length_);
-  AudioMultiVector<T> vec2(num_channels_);
+  AudioMultiVector vec2(num_channels_);
 
   // Append vec1 to the back of vec2 (which is empty). Read vec1 from the second
   // last element.
@@ -176,7 +175,7 @@ TEST_P(AudioMultiVectorTest, PushBackFromIndex) {
 
 // Starts with pushing some values to the vector, then test the Zeros method.
 TEST_P(AudioMultiVectorTest, Zeros) {
-  AudioMultiVector<T> vec(num_channels_);
+  AudioMultiVector vec(num_channels_);
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
   vec.Zeros(2 * array_length());
   ASSERT_EQ(num_channels_, vec.Channels());
@@ -190,28 +189,30 @@ TEST_P(AudioMultiVectorTest, Zeros) {
 
 // Test the ReadInterleaved method
 TEST_P(AudioMultiVectorTest, ReadInterleaved) {
-  AudioMultiVector<T> vec(num_channels_);
+  AudioMultiVector vec(num_channels_);
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
-  T* output = new T[interleaved_length_];
+  int16_t* output = new int16_t[interleaved_length_];
   // Read 5 samples.
   size_t read_samples = 5;
   EXPECT_EQ(num_channels_ * read_samples,
             vec.ReadInterleaved(read_samples, output));
-  EXPECT_EQ(0, memcmp(array_interleaved_, output, read_samples * sizeof(T)));
+  EXPECT_EQ(0,
+            memcmp(array_interleaved_, output, read_samples * sizeof(int16_t)));
 
   // Read too many samples. Expect to get all samples from the vector.
   EXPECT_EQ(interleaved_length_,
             vec.ReadInterleaved(array_length() + 1, output));
-  EXPECT_EQ(0, memcmp(array_interleaved_, output, read_samples * sizeof(T)));
+  EXPECT_EQ(0,
+            memcmp(array_interleaved_, output, read_samples * sizeof(int16_t)));
 
   delete [] output;
 }
 
 // Try to read to a NULL pointer. Expected to return 0.
 TEST_P(AudioMultiVectorTest, ReadInterleavedToNull) {
-  AudioMultiVector<T> vec(num_channels_);
+  AudioMultiVector vec(num_channels_);
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
-  T* output = NULL;
+  int16_t* output = NULL;
   // Read 5 samples.
   size_t read_samples = 5;
   EXPECT_EQ(0u, vec.ReadInterleaved(read_samples, output));
@@ -219,13 +220,13 @@ TEST_P(AudioMultiVectorTest, ReadInterleavedToNull) {
 
 // Test the PopFront method.
 TEST_P(AudioMultiVectorTest, PopFront) {
-  AudioMultiVector<T> vec(num_channels_);
+  AudioMultiVector vec(num_channels_);
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
   vec.PopFront(1);  // Remove one element from each channel.
   ASSERT_EQ(array_length() - 1u, vec.Size());
   // Let |ptr| point to the second element of the first channel in the
   // interleaved array.
-  T* ptr = &array_interleaved_[num_channels_];
+  int16_t* ptr = &array_interleaved_[num_channels_];
   for (size_t i = 0; i < array_length() - 1; ++i) {
     for (size_t channel = 0; channel < num_channels_; ++channel) {
       EXPECT_EQ(*ptr, vec[channel][i]);
@@ -238,13 +239,13 @@ TEST_P(AudioMultiVectorTest, PopFront) {
 
 // Test the PopBack method.
 TEST_P(AudioMultiVectorTest, PopBack) {
-  AudioMultiVector<T> vec(num_channels_);
+  AudioMultiVector vec(num_channels_);
   vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
   vec.PopBack(1);  // Remove one element from each channel.
   ASSERT_EQ(array_length() - 1u, vec.Size());
   // Let |ptr| point to the first element of the first channel in the
   // interleaved array.
-  T* ptr = array_interleaved_;
+  int16_t* ptr = array_interleaved_;
   for (size_t i = 0; i < array_length() - 1; ++i) {
     for (size_t channel = 0; channel < num_channels_; ++channel) {
       EXPECT_EQ(*ptr, vec[channel][i]);
@@ -257,7 +258,7 @@ TEST_P(AudioMultiVectorTest, PopBack) {
 
 // Test the AssertSize method.
 TEST_P(AudioMultiVectorTest, AssertSize) {
-  AudioMultiVector<T> vec(num_channels_, array_length());
+  AudioMultiVector vec(num_channels_, array_length());
   EXPECT_EQ(array_length(), vec.Size());
   // Start with asserting with smaller sizes than already allocated.
   vec.AssertSize(0);
@@ -276,16 +277,16 @@ TEST_P(AudioMultiVectorTest, AssertSize) {
 
 // Test the PushBack method with another AudioMultiVector as input argument.
 TEST_P(AudioMultiVectorTest, OverwriteAt) {
-  AudioMultiVector<T> vec1(num_channels_);
+  AudioMultiVector vec1(num_channels_);
   vec1.PushBackInterleaved(array_interleaved_, interleaved_length_);
-  AudioMultiVector<T> vec2(num_channels_);
+  AudioMultiVector vec2(num_channels_);
   vec2.Zeros(3);  // 3 zeros in each channel.
   // Overwrite vec2 at position 5.
   vec1.OverwriteAt(vec2, 3, 5);
   // Verify result.
   // Length remains the same.
   ASSERT_EQ(array_length(), vec1.Size());
-  T* ptr = array_interleaved_;
+  int16_t* ptr = array_interleaved_;
   for (size_t i = 0; i < array_length() - 1; ++i) {
     for (size_t channel = 0; channel < num_channels_; ++channel) {
       if (i >= 5 && i <= 7) {
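The audio_vector.cc rewrite that follows replaces std::vector<T> with a raw int16_t buffer plus two indices: first_free_ix_ (one past the last sample) and capacity_. A hedged walk-through of the resulting semantics, assuming the patched class (comments show the expected state):

AudioVector v;             // Size() == 0; capacity_ == kDefaultInitialSize.
int16_t a[4] = {1, 2, 3, 4};
v.PushBack(a, 4);          // memcpy at first_free_ix_; Size() == 4.
v.PopBack(1);              // Only moves first_free_ix_ back; Size() == 3.
v.PushFront(a, 2);         // memmove existing data right, then copy in front:
                           // contents are now {1, 2, 1, 2, 3}.
v.Extend(2);               // Appends zeros: {1, 2, 1, 2, 3, 0, 0}.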
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.cc
index c1c2bc285bcb..cbd461630689 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.cc
@@ -18,122 +18,109 @@
 
 namespace webrtc {
 
-template <typename T>
-void AudioVector<T>::Clear() {
-  vector_.clear();
+void AudioVector::Clear() {
+  first_free_ix_ = 0;
 }
 
-template <typename T>
-void AudioVector<T>::CopyFrom(AudioVector<T>* copy_to) const {
+void AudioVector::CopyFrom(AudioVector* copy_to) const {
   if (copy_to) {
-    copy_to->vector_.assign(vector_.begin(), vector_.end());
+    copy_to->Reserve(Size());
+    assert(copy_to->capacity_ >= Size());
+    memcpy(copy_to->array_.get(), array_.get(), Size() * sizeof(int16_t));
+    copy_to->first_free_ix_ = first_free_ix_;
   }
 }
 
-template <typename T>
-void AudioVector<T>::PushFront(const AudioVector<T>& prepend_this) {
-  vector_.insert(vector_.begin(), prepend_this.vector_.begin(),
-                 prepend_this.vector_.end());
+void AudioVector::PushFront(const AudioVector& prepend_this) {
+  size_t insert_length = prepend_this.Size();
+  Reserve(Size() + insert_length);
+  memmove(&array_[insert_length], &array_[0], Size() * sizeof(int16_t));
+  memcpy(&array_[0], &prepend_this.array_[0], insert_length * sizeof(int16_t));
+  first_free_ix_ += insert_length;
 }
 
-template <typename T>
-void AudioVector<T>::PushFront(const T* prepend_this, size_t length) {
+void AudioVector::PushFront(const int16_t* prepend_this, size_t length) {
   // Same operation as InsertAt beginning.
   InsertAt(prepend_this, length, 0);
 }
 
-template <typename T>
-void AudioVector<T>::PushBack(const AudioVector<T>& append_this) {
-  vector_.reserve(vector_.size() + append_this.Size());
-  for (size_t i = 0; i < append_this.Size(); ++i) {
-    vector_.push_back(append_this[i]);
-  }
+void AudioVector::PushBack(const AudioVector& append_this) {
+  PushBack(append_this.array_.get(), append_this.Size());
 }
 
-template <typename T>
-void AudioVector<T>::PushBack(const T* append_this, size_t length) {
-  vector_.reserve(vector_.size() + length);
-  for (size_t i = 0; i < length; ++i) {
-    vector_.push_back(append_this[i]);
-  }
+void AudioVector::PushBack(const int16_t* append_this, size_t length) {
+  Reserve(Size() + length);
+  memcpy(&array_[first_free_ix_], append_this, length * sizeof(int16_t));
+  first_free_ix_ += length;
 }
 
-template <typename T>
-void AudioVector<T>::PopFront(size_t length) {
-  if (length >= vector_.size()) {
+void AudioVector::PopFront(size_t length) {
+  if (length >= Size()) {
     // Remove all elements.
-    vector_.clear();
+    Clear();
   } else {
-    typename std::vector<T>::iterator end_range = vector_.begin();
-    end_range += length;
-    // Erase all elements in range vector_.begin() and |end_range| (not
-    // including |end_range|).
-    vector_.erase(vector_.begin(), end_range);
+    size_t remaining_samples = Size() - length;
+    memmove(&array_[0], &array_[length], remaining_samples * sizeof(int16_t));
+    first_free_ix_ -= length;
  }
 }
 
-template <typename T>
-void AudioVector<T>::PopBack(size_t length) {
-  // Make sure that new_size is never negative (which causes wrap-around).
-  size_t new_size = vector_.size() - std::min(length, vector_.size());
-  vector_.resize(new_size);
+void AudioVector::PopBack(size_t length) {
+  // Never remove more than what is in the array.
+  length = std::min(length, Size());
+  first_free_ix_ -= length;
 }
 
-template <typename T>
-void AudioVector<T>::Extend(size_t extra_length) {
-  vector_.insert(vector_.end(), extra_length, 0);
+void AudioVector::Extend(size_t extra_length) {
+  Reserve(Size() + extra_length);
+  memset(&array_[first_free_ix_], 0, extra_length * sizeof(int16_t));
+  first_free_ix_ += extra_length;
 }
 
-template <typename T>
-void AudioVector<T>::InsertAt(const T* insert_this,
+void AudioVector::InsertAt(const int16_t* insert_this,
+                           size_t length,
+                           size_t position) {
+  Reserve(Size() + length);
+  // Cap the position at the current vector length, to be sure the iterator
+  // does not extend beyond the end of the vector.
+  position = std::min(Size(), position);
+  int16_t* insert_position_ptr = &array_[position];
+  size_t samples_to_move = Size() - position;
+  memmove(insert_position_ptr + length, insert_position_ptr,
+          samples_to_move * sizeof(int16_t));
+  memcpy(insert_position_ptr, insert_this, length * sizeof(int16_t));
+  first_free_ix_ += length;
+}
+
+void AudioVector::InsertZerosAt(size_t length,
+                                size_t position) {
+  Reserve(Size() + length);
+  // Cap the position at the current vector length, to be sure the iterator
+  // does not extend beyond the end of the vector.
+  position = std::min(capacity_, position);
+  int16_t* insert_position_ptr = &array_[position];
+  size_t samples_to_move = Size() - position;
+  memmove(insert_position_ptr + length, insert_position_ptr,
+          samples_to_move * sizeof(int16_t));
+  memset(insert_position_ptr, 0, length * sizeof(int16_t));
+  first_free_ix_ += length;
+}
+
+void AudioVector::OverwriteAt(const int16_t* insert_this,
                               size_t length,
                               size_t position) {
-  typename std::vector<T>::iterator insert_position = vector_.begin();
-  // Cap the position at the current vector length, to be sure the iterator
-  // does not extend beyond the end of the vector.
-  position = std::min(vector_.size(), position);
-  insert_position += position;
-  // First, insert zeros at the position. This makes the vector longer (and
-  // invalidates the iterator |insert_position|.
-  vector_.insert(insert_position, length, 0);
-  // Write the new values into the vector.
-  for (size_t i = 0; i < length; ++i) {
-    vector_[position + i] = insert_this[i];
+  // Cap the insert position at the current array length.
+  position = std::min(Size(), position);
+  Reserve(position + length);
+  memcpy(&array_[position], insert_this, length * sizeof(int16_t));
+  if (position + length > Size()) {
+    // Array was expanded.
+    first_free_ix_ += position + length - Size();
   }
 }
 
-template <typename T>
-void AudioVector<T>::InsertZerosAt(size_t length,
-                                   size_t position) {
-  typename std::vector<T>::iterator insert_position = vector_.begin();
-  // Cap the position at the current vector length, to be sure the iterator
-  // does not extend beyond the end of the vector.
-  position = std::min(vector_.size(), position);
-  insert_position += position;
-  // Insert zeros at the position. This makes the vector longer (and
-  // invalidates the iterator |insert_position|.
-  vector_.insert(insert_position, length, 0);
-}
-
-template <typename T>
-void AudioVector<T>::OverwriteAt(const T* insert_this,
-                                 size_t length,
-                                 size_t position) {
-  // Cap the insert position at the current vector length.
-  position = std::min(vector_.size(), position);
-  // Extend the vector if needed. (It is valid to overwrite beyond the current
-  // end of the vector.)
-  if (position + length > vector_.size()) {
-    Extend(position + length - vector_.size());
-  }
-  for (size_t i = 0; i < length; ++i) {
-    vector_[position + i] = insert_this[i];
-  }
-}
-
-template <typename T>
-void AudioVector<T>::CrossFade(const AudioVector<T>& append_this,
-                               size_t fade_length) {
+void AudioVector::CrossFade(const AudioVector& append_this,
+                            size_t fade_length) {
   // Fade length cannot be longer than the current vector or |append_this|.
   assert(fade_length <= Size());
   assert(fade_length <= append_this.Size());
@@ -148,7 +135,7 @@ void AudioVector<T>::CrossFade(const AudioVector<T>& append_this,
   int alpha = 16384;
   for (size_t i = 0; i < fade_length; ++i) {
     alpha -= alpha_step;
-    vector_[position + i] = (alpha * vector_[position + i] +
+    array_[position + i] = (alpha * array_[position + i] +
         (16384 - alpha) * append_this[i] + 8192) >> 14;
   }
   assert(alpha >= 0);  // Verify that the slope was correct.
@@ -158,49 +145,21 @@ void AudioVector<T>::CrossFade(const AudioVector<T>& append_this,
     PushBack(&append_this[fade_length], samples_to_push_back);
 }
 
-// Template specialization for double. The only difference is in the calculation
-// of the cross-faded value, where we divide by 16384 instead of shifting with
-// 14 steps, and also not adding 8192 before scaling.
-template<>
-void AudioVector<double>::CrossFade(const AudioVector<double>& append_this,
-                                    size_t fade_length) {
-  // Fade length cannot be longer than the current vector or |append_this|.
-  assert(fade_length <= Size());
-  assert(fade_length <= append_this.Size());
-  fade_length = std::min(fade_length, Size());
-  fade_length = std::min(fade_length, append_this.Size());
-  size_t position = Size() - fade_length;
-  // Cross fade the overlapping regions.
-  // |alpha| is the mixing factor in Q14.
-  // TODO(hlundin): Consider skipping +1 in the denominator to produce a
-  // smoother cross-fade, in particular at the end of the fade.
-  int alpha_step = 16384 / (static_cast<int>(fade_length) + 1);
-  int alpha = 16384;
-  for (size_t i = 0; i < fade_length; ++i) {
-    alpha -= alpha_step;
-    vector_[position + i] = (alpha * vector_[position + i] +
-        (16384 - alpha) * append_this[i]) / 16384;
+const int16_t& AudioVector::operator[](size_t index) const {
+  return array_[index];
+}
+
+int16_t& AudioVector::operator[](size_t index) {
+  return array_[index];
+}
+
+void AudioVector::Reserve(size_t n) {
+  if (capacity_ < n) {
+    scoped_ptr<int16_t[]> temp_array(new int16_t[n]);
+    memcpy(temp_array.get(), array_.get(), Size() * sizeof(int16_t));
+    array_.swap(temp_array);
+    capacity_ = n;
   }
-  assert(alpha >= 0);  // Verify that the slope was correct.
-  // Append what is left of |append_this|.
-  size_t samples_to_push_back = append_this.Size() - fade_length;
-  if (samples_to_push_back > 0)
-    PushBack(&append_this[fade_length], samples_to_push_back);
-}
-
-template <typename T>
-const T& AudioVector<T>::operator[](size_t index) const {
-  return vector_[index];
-}
-
-template <typename T>
-T& AudioVector<T>::operator[](size_t index) {
-  return vector_[index];
-}
-
-// Instantiate the template for a few types.
-template class AudioVector<int16_t>;
-template class AudioVector<int32_t>;
-template class AudioVector<double>;
-
 }  // namespace webrtc
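The Q14 cross-fade kept above mixes old and new samples with fixed-point weights. One step, extracted into a helper for clarity (the function name is ours; the arithmetic is the patch's): with alpha = 8192 (0.5 in Q14), old = 1000 and new = 3000, the result is (8192*1000 + 8192*3000 + 8192) >> 14 = 2000, i.e. the rounded midpoint.

#include <cstdint>

// One cross-fade step as in AudioVector::CrossFade(); alpha is Q14 and ramps
// from 16384 (all old sample) toward 0 (all new sample).
int16_t CrossFadeStep(int16_t old_sample, int16_t new_sample, int alpha) {
  return static_cast<int16_t>(
      (alpha * old_sample + (16384 - alpha) * new_sample + 8192) >> 14);
}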
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.h
index 3fd23693d231..66bd518a806f 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector.h
@@ -13,21 +13,27 @@
 
 #include <string.h>  // Access to size_t.
 
-#include <vector>
-
 #include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
 
 namespace webrtc {
 
-template <typename T>
 class AudioVector {
  public:
   // Creates an empty AudioVector.
-  AudioVector() {}
+  AudioVector()
+      : array_(new int16_t[kDefaultInitialSize]),
+        first_free_ix_(0),
+        capacity_(kDefaultInitialSize) {}
 
   // Creates an AudioVector with an initial size.
   explicit AudioVector(size_t initial_size)
-      : vector_(initial_size, 0) {}
+      : array_(new int16_t[initial_size]),
+        first_free_ix_(initial_size),
+        capacity_(initial_size) {
+    memset(array_.get(), 0, initial_size * sizeof(int16_t));
+  }
 
   virtual ~AudioVector() {}
 
@@ -37,21 +43,21 @@ class AudioVector {
   // Copies all values from this vector to |copy_to|. Any contents in |copy_to|
   // are deleted before the copy operation. After the operation is done,
   // |copy_to| will be an exact replica of this object.
-  virtual void CopyFrom(AudioVector<T>* copy_to) const;
+  virtual void CopyFrom(AudioVector* copy_to) const;
 
   // Prepends the contents of AudioVector |prepend_this| to this object. The
   // length of this object is increased with the length of |prepend_this|.
-  virtual void PushFront(const AudioVector<T>& prepend_this);
+  virtual void PushFront(const AudioVector& prepend_this);
 
   // Same as above, but with an array |prepend_this| with |length| elements as
   // source.
-  virtual void PushFront(const T* prepend_this, size_t length);
+  virtual void PushFront(const int16_t* prepend_this, size_t length);
 
   // Same as PushFront but will append to the end of this object.
-  virtual void PushBack(const AudioVector<T>& append_this);
+  virtual void PushBack(const AudioVector& append_this);
 
   // Same as PushFront but will append to the end of this object.
-  virtual void PushBack(const T* append_this, size_t length);
+  virtual void PushBack(const int16_t* append_this, size_t length);
 
   // Removes |length| elements from the beginning of this object.
   virtual void PopFront(size_t length);
@@ -67,7 +73,8 @@ class AudioVector {
   // them at |position|. The length of the AudioVector is increased by |length|.
   // |position| = 0 means that the new values are prepended to the vector.
   // |position| = Size() means that the new values are appended to the vector.
-  virtual void InsertAt(const T* insert_this, size_t length, size_t position);
+  virtual void InsertAt(const int16_t* insert_this, size_t length,
+                        size_t position);
 
   // Like InsertAt, but inserts |length| zero elements at |position|.
   virtual void InsertZerosAt(size_t length, size_t position);
@@ -77,27 +84,34 @@ class AudioVector {
   // is the same as for InsertAt(). If |length| and |position| are selected
   // such that the new data extends beyond the end of the current AudioVector,
   // the vector is extended to accommodate the new data.
-  virtual void OverwriteAt(const T* insert_this,
+  virtual void OverwriteAt(const int16_t* insert_this,
                            size_t length,
                            size_t position);
 
   // Appends |append_this| to the end of the current vector. Lets the two
   // vectors overlap by |fade_length| samples, and cross-fade linearly in this
   // region.
-  virtual void CrossFade(const AudioVector<T>& append_this, size_t fade_length);
+  virtual void CrossFade(const AudioVector& append_this, size_t fade_length);
 
   // Returns the number of elements in this AudioVector.
-  virtual size_t Size() const { return vector_.size(); }
+  virtual size_t Size() const { return first_free_ix_; }
 
   // Returns true if this AudioVector is empty.
-  virtual bool Empty() const { return vector_.empty(); }
+  virtual bool Empty() const { return (first_free_ix_ == 0); }
 
   // Accesses and modifies an element of AudioVector.
-  const T& operator[](size_t index) const;
-  T& operator[](size_t index);
+  const int16_t& operator[](size_t index) const;
+  int16_t& operator[](size_t index);
 
  private:
-  std::vector<T> vector_;
+  static const size_t kDefaultInitialSize = 10;
+
+  void Reserve(size_t n);
+
+  scoped_ptr<int16_t[]> array_;
+  size_t first_free_ix_;  // The first index after the last sample in array_.
+                          // Note that this index may point outside of array_.
+  size_t capacity_;  // Allocated number of samples in the array.
 
   DISALLOW_COPY_AND_ASSIGN(AudioVector);
 };
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc
index c5f1ba447445..de5aac2d955a 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc
@@ -20,22 +20,12 @@
 
 namespace webrtc {
 
-// The tests in this file are so called typed tests (see e.g.,
-// http://code.google.com/p/googletest/wiki/AdvancedGuide#Typed_Tests).
-// This means that the tests are written with the typename T as an unknown
-// template type. The tests are then instantiated for a few types; int16_t,
-// int32_t and double in this case. Each test is then run once for each of
-// these types.
-// A few special tricks are needed. For instance, the member variable |array_|
-// in the test fixture must be accessed using this->array_ in the tests.
-
-template <typename T>
 class AudioVectorTest : public ::testing::Test {
  protected:
   virtual void SetUp() {
     // Populate test array.
     for (size_t i = 0; i < array_length(); ++i) {
-      array_[i] = static_cast<T>(i);
+      array_[i] = i;
     }
   }
 
@@ -43,48 +33,44 @@ class AudioVectorTest : public ::testing::Test {
     return sizeof(array_) / sizeof(array_[0]);
   }
 
-  T array_[10];
+  int16_t array_[10];
 };
 
-// Instantiate typed tests with int16_t, int32_t, and double.
-typedef ::testing::Types<int16_t, int32_t, double> MyTypes;
-TYPED_TEST_CASE(AudioVectorTest, MyTypes);
-
 // Create and destroy AudioVector objects, both empty and with a predefined
 // length.
-TYPED_TEST(AudioVectorTest, CreateAndDestroy) {
-  AudioVector<TypeParam> vec1;
+TEST_F(AudioVectorTest, CreateAndDestroy) {
+  AudioVector vec1;
   EXPECT_TRUE(vec1.Empty());
   EXPECT_EQ(0u, vec1.Size());
 
   size_t initial_size = 17;
-  AudioVector<TypeParam> vec2(initial_size);
+  AudioVector vec2(initial_size);
   EXPECT_FALSE(vec2.Empty());
   EXPECT_EQ(initial_size, vec2.Size());
 }
 
 // Test the subscript operator [] for getting and setting.
-TYPED_TEST(AudioVectorTest, SubscriptOperator) {
-  AudioVector<TypeParam> vec(this->array_length());
-  for (size_t i = 0; i < this->array_length(); ++i) {
-    vec[i] = static_cast<TypeParam>(i);
-    const TypeParam& value = vec[i];  // Make sure to use the const version.
-    EXPECT_EQ(static_cast<TypeParam>(i), value);
+TEST_F(AudioVectorTest, SubscriptOperator) {
+  AudioVector vec(array_length());
+  for (size_t i = 0; i < array_length(); ++i) {
+    vec[i] = static_cast<int16_t>(i);
+    const int16_t& value = vec[i];  // Make sure to use the const version.
+    EXPECT_EQ(static_cast<int16_t>(i), value);
   }
 }
 
 // Test the PushBack method and the CopyFrom method. The Clear method is also
 // invoked.
-TYPED_TEST(AudioVectorTest, PushBackAndCopy) {
-  AudioVector<TypeParam> vec;
-  AudioVector<TypeParam> vec_copy;
-  vec.PushBack(this->array_, this->array_length());
+TEST_F(AudioVectorTest, PushBackAndCopy) {
+  AudioVector vec;
+  AudioVector vec_copy;
+  vec.PushBack(array_, array_length());
   vec.CopyFrom(&vec_copy);  // Copy from |vec| to |vec_copy|.
-  ASSERT_EQ(this->array_length(), vec.Size());
-  ASSERT_EQ(this->array_length(), vec_copy.Size());
-  for (size_t i = 0; i < this->array_length(); ++i) {
-    EXPECT_EQ(this->array_[i], vec[i]);
-    EXPECT_EQ(this->array_[i], vec_copy[i]);
+  ASSERT_EQ(array_length(), vec.Size());
+  ASSERT_EQ(array_length(), vec_copy.Size());
+  for (size_t i = 0; i < array_length(); ++i) {
+    EXPECT_EQ(array_[i], vec[i]);
+    EXPECT_EQ(array_[i], vec_copy[i]);
   }
 
   // Clear |vec| and verify that it is empty.
@@ -97,105 +83,105 @@ TYPED_TEST(AudioVectorTest, PushBackAndCopy) {
 }
 
 // Try to copy to a NULL pointer. Nothing should happen.
-TYPED_TEST(AudioVectorTest, CopyToNull) {
-  AudioVector<TypeParam> vec;
-  AudioVector<TypeParam>* vec_copy = NULL;
-  vec.PushBack(this->array_, this->array_length());
+TEST_F(AudioVectorTest, CopyToNull) {
+  AudioVector vec;
+  AudioVector* vec_copy = NULL;
+  vec.PushBack(array_, array_length());
   vec.CopyFrom(vec_copy);
 }
 
 // Test the PushBack method with another AudioVector as input argument.
-TYPED_TEST(AudioVectorTest, PushBackVector) {
+TEST_F(AudioVectorTest, PushBackVector) {
   static const size_t kLength = 10;
-  AudioVector<TypeParam> vec1(kLength);
-  AudioVector<TypeParam> vec2(kLength);
+  AudioVector vec1(kLength);
+  AudioVector vec2(kLength);
   // Set the first vector to [0, 1, ..., kLength - 1].
   // Set the second vector to [kLength, kLength + 1, ..., 2 * kLength - 1].
   for (size_t i = 0; i < kLength; ++i) {
-    vec1[i] = static_cast<TypeParam>(i);
-    vec2[i] = static_cast<TypeParam>(i + kLength);
+    vec1[i] = static_cast<int16_t>(i);
+    vec2[i] = static_cast<int16_t>(i + kLength);
   }
   // Append vec2 to the back of vec1.
   vec1.PushBack(vec2);
   ASSERT_EQ(2 * kLength, vec1.Size());
   for (size_t i = 0; i < 2 * kLength; ++i) {
-    EXPECT_EQ(static_cast<TypeParam>(i), vec1[i]);
+    EXPECT_EQ(static_cast<int16_t>(i), vec1[i]);
   }
 }
 
 // Test the PushFront method.
-TYPED_TEST(AudioVectorTest, PushFront) {
-  AudioVector<TypeParam> vec;
-  vec.PushFront(this->array_, this->array_length());
-  ASSERT_EQ(this->array_length(), vec.Size());
-  for (size_t i = 0; i < this->array_length(); ++i) {
-    EXPECT_EQ(this->array_[i], vec[i]);
+TEST_F(AudioVectorTest, PushFront) {
+  AudioVector vec;
+  vec.PushFront(array_, array_length());
+  ASSERT_EQ(array_length(), vec.Size());
+  for (size_t i = 0; i < array_length(); ++i) {
+    EXPECT_EQ(array_[i], vec[i]);
  }
 }
 
 // Test the PushFront method with another AudioVector as input argument.
-TYPED_TEST(AudioVectorTest, PushFrontVector) { +TEST_F(AudioVectorTest, PushFrontVector) { static const size_t kLength = 10; - AudioVector<TypeParam> vec1(kLength); - AudioVector<TypeParam> vec2(kLength); + AudioVector vec1(kLength); + AudioVector vec2(kLength); // Set the first vector to [0, 1, ..., kLength - 1]. // Set the second vector to [kLength, kLength + 1, ..., 2 * kLength - 1]. for (size_t i = 0; i < kLength; ++i) { - vec1[i] = static_cast<TypeParam>(i); - vec2[i] = static_cast<TypeParam>(i + kLength); + vec1[i] = static_cast<int16_t>(i); + vec2[i] = static_cast<int16_t>(i + kLength); } // Prepend vec1 to the front of vec2. vec2.PushFront(vec1); ASSERT_EQ(2 * kLength, vec2.Size()); for (size_t i = 0; i < 2 * kLength; ++i) { - EXPECT_EQ(static_cast<TypeParam>(i), vec2[i]); + EXPECT_EQ(static_cast<int16_t>(i), vec2[i]); } } // Test the PopFront method. -TYPED_TEST(AudioVectorTest, PopFront) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, PopFront) { + AudioVector vec; + vec.PushBack(array_, array_length()); vec.PopFront(1); // Remove one element. - EXPECT_EQ(this->array_length() - 1u, vec.Size()); - for (size_t i = 0; i < this->array_length() - 1; ++i) { - EXPECT_EQ(static_cast<TypeParam>(i + 1), vec[i]); + EXPECT_EQ(array_length() - 1u, vec.Size()); + for (size_t i = 0; i < array_length() - 1; ++i) { + EXPECT_EQ(static_cast<int16_t>(i + 1), vec[i]); } - vec.PopFront(this->array_length()); // Remove more elements than vector size. + vec.PopFront(array_length()); // Remove more elements than vector size. EXPECT_EQ(0u, vec.Size()); } // Test the PopBack method. -TYPED_TEST(AudioVectorTest, PopBack) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, PopBack) { + AudioVector vec; + vec.PushBack(array_, array_length()); vec.PopBack(1); // Remove one element. - EXPECT_EQ(this->array_length() - 1u, vec.Size()); - for (size_t i = 0; i < this->array_length() - 1; ++i) { - EXPECT_EQ(static_cast<TypeParam>(i), vec[i]); + EXPECT_EQ(array_length() - 1u, vec.Size()); + for (size_t i = 0; i < array_length() - 1; ++i) { + EXPECT_EQ(static_cast<int16_t>(i), vec[i]); } - vec.PopBack(this->array_length()); // Remove more elements than vector size. + vec.PopBack(array_length()); // Remove more elements than vector size. EXPECT_EQ(0u, vec.Size()); } // Test the Extend method. -TYPED_TEST(AudioVectorTest, Extend) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, Extend) { + AudioVector vec; + vec.PushBack(array_, array_length()); vec.Extend(5); // Extend with 5 elements, which should all be zeros. - ASSERT_EQ(this->array_length() + 5u, vec.Size()); + ASSERT_EQ(array_length() + 5u, vec.Size()); // Verify that all are zero. - for (size_t i = this->array_length(); i < this->array_length() + 5; ++i) { + for (size_t i = array_length(); i < array_length() + 5; ++i) { EXPECT_EQ(0, vec[i]); } } // Test the InsertAt method with an insert position in the middle of the vector. -TYPED_TEST(AudioVectorTest, InsertAt) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, InsertAt) { + AudioVector vec; + vec.PushBack(array_, array_length()); static const int kNewLength = 5; - TypeParam new_array[kNewLength]; + int16_t new_array[kNewLength]; // Set array elements to {100, 101, 102, ... }. for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } @@ -207,30 +193,30 @@ TYPED_TEST(AudioVectorTest, InsertAt) { // |insert_position|, |insert_position| + 1, ..., kLength - 1}.
size_t pos = 0; for (int i = 0; i < insert_position; ++i) { - EXPECT_EQ(this->array_[i], vec[pos]); + EXPECT_EQ(array_[i], vec[pos]); ++pos; } for (int i = 0; i < kNewLength; ++i) { EXPECT_EQ(new_array[i], vec[pos]); ++pos; } - for (size_t i = insert_position; i < this->array_length(); ++i) { - EXPECT_EQ(this->array_[i], vec[pos]); + for (size_t i = insert_position; i < array_length(); ++i) { + EXPECT_EQ(array_[i], vec[pos]); ++pos; } } // Test the InsertZerosAt method with an insert position in the middle of the // vector. Use the InsertAt method as reference. -TYPED_TEST(AudioVectorTest, InsertZerosAt) { - AudioVector<TypeParam> vec; - AudioVector<TypeParam> vec_ref; - vec.PushBack(this->array_, this->array_length()); - vec_ref.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, InsertZerosAt) { + AudioVector vec; + AudioVector vec_ref; + vec.PushBack(array_, array_length()); + vec_ref.PushBack(array_, array_length()); static const int kNewLength = 5; int insert_position = 5; vec.InsertZerosAt(kNewLength, insert_position); - TypeParam new_array[kNewLength] = {0}; // All zero elements. + int16_t new_array[kNewLength] = {0}; // All zero elements. vec_ref.InsertAt(new_array, kNewLength, insert_position); // Verify that the vectors are identical. ASSERT_EQ(vec_ref.Size(), vec.Size()); @@ -240,11 +226,11 @@ TYPED_TEST(AudioVectorTest, InsertZerosAt) { } // Test the InsertAt method with an insert position at the start of the vector. -TYPED_TEST(AudioVectorTest, InsertAtBeginning) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, InsertAtBeginning) { + AudioVector vec; + vec.PushBack(array_, array_length()); static const int kNewLength = 5; - TypeParam new_array[kNewLength]; + int16_t new_array[kNewLength]; // Set array elements to {100, 101, 102, ... }. for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } @@ -259,29 +245,29 @@ TYPED_TEST(AudioVectorTest, InsertAtBeginning) { EXPECT_EQ(new_array[i], vec[pos]); ++pos; } - for (size_t i = insert_position; i < this->array_length(); ++i) { - EXPECT_EQ(this->array_[i], vec[pos]); + for (size_t i = insert_position; i < array_length(); ++i) { + EXPECT_EQ(array_[i], vec[pos]); ++pos; } } // Test the InsertAt method with an insert position at the end of the vector. -TYPED_TEST(AudioVectorTest, InsertAtEnd) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, InsertAtEnd) { + AudioVector vec; + vec.PushBack(array_, array_length()); static const int kNewLength = 5; - TypeParam new_array[kNewLength]; + int16_t new_array[kNewLength]; // Set array elements to {100, 101, 102, ... }. for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } - int insert_position = this->array_length(); + int insert_position = array_length(); vec.InsertAt(new_array, kNewLength, insert_position); // Verify that the vector looks as follows: // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }. size_t pos = 0; - for (size_t i = 0; i < this->array_length(); ++i) { - EXPECT_EQ(this->array_[i], vec[pos]); + for (size_t i = 0; i < array_length(); ++i) { + EXPECT_EQ(array_[i], vec[pos]); ++pos; } for (int i = 0; i < kNewLength; ++i) { @@ -295,22 +281,22 @@ TYPED_TEST(AudioVectorTest, InsertAtEnd) { // an error. The expected outcome is the same as if the vector end was used as // input position. That is, the input position should be capped at the maximum // allowed value.
-TYPED_TEST(AudioVectorTest, InsertBeyondEnd) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, InsertBeyondEnd) { + AudioVector vec; + vec.PushBack(array_, array_length()); static const int kNewLength = 5; - TypeParam new_array[kNewLength]; + int16_t new_array[kNewLength]; // Set array elements to {100, 101, 102, ... }. for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } - int insert_position = this->array_length() + 10; // Too large. + int insert_position = array_length() + 10; // Too large. vec.InsertAt(new_array, kNewLength, insert_position); // Verify that the vector looks as follows: // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }. size_t pos = 0; - for (size_t i = 0; i < this->array_length(); ++i) { - EXPECT_EQ(this->array_[i], vec[pos]); + for (size_t i = 0; i < array_length(); ++i) { + EXPECT_EQ(array_[i], vec[pos]); ++pos; } for (int i = 0; i < kNewLength; ++i) { @@ -321,11 +307,11 @@ TYPED_TEST(AudioVectorTest, InsertBeyondEnd) { // Test the OverwriteAt method with a position such that all of the new values // fit within the old vector. -TYPED_TEST(AudioVectorTest, OverwriteAt) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, OverwriteAt) { + AudioVector vec; + vec.PushBack(array_, array_length()); static const int kNewLength = 5; - TypeParam new_array[kNewLength]; + int16_t new_array[kNewLength]; // Set array elements to {100, 101, 102, ... }. for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } @@ -337,38 +323,38 @@ TYPED_TEST(AudioVectorTest, OverwriteAt) { // |insert_position|, |insert_position| + 1, ..., kLength - 1}. size_t pos = 0; for (pos = 0; pos < insert_position; ++pos) { - EXPECT_EQ(this->array_[pos], vec[pos]); + EXPECT_EQ(array_[pos], vec[pos]); } for (int i = 0; i < kNewLength; ++i) { EXPECT_EQ(new_array[i], vec[pos]); ++pos; } - for (; pos < this->array_length(); ++pos) { - EXPECT_EQ(this->array_[pos], vec[pos]); + for (; pos < array_length(); ++pos) { + EXPECT_EQ(array_[pos], vec[pos]); } } // Test the OverwriteAt method with a position such that some of the new values // extend beyond the end of the current vector. This is valid, and the vector is // expected to expand to accommodate the new values. -TYPED_TEST(AudioVectorTest, OverwriteBeyondEnd) { - AudioVector<TypeParam> vec; - vec.PushBack(this->array_, this->array_length()); +TEST_F(AudioVectorTest, OverwriteBeyondEnd) { + AudioVector vec; + vec.PushBack(array_, array_length()); static const int kNewLength = 5; - TypeParam new_array[kNewLength]; + int16_t new_array[kNewLength]; // Set array elements to {100, 101, 102, ... }. for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } - int insert_position = this->array_length() - 2; + int insert_position = array_length() - 2; vec.OverwriteAt(new_array, kNewLength, insert_position); - ASSERT_EQ(this->array_length() - 2u + kNewLength, vec.Size()); + ASSERT_EQ(array_length() - 2u + kNewLength, vec.Size()); // Verify that the vector looks as follows: // {0, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1, // |insert_position|, |insert_position| + 1, ..., kLength - 1}.
int pos = 0; for (pos = 0; pos < insert_position; ++pos) { - EXPECT_EQ(this->array_[pos], vec[pos]); + EXPECT_EQ(array_[pos], vec[pos]); } for (int i = 0; i < kNewLength; ++i) { EXPECT_EQ(new_array[i], vec[pos]); @@ -378,11 +364,11 @@ TYPED_TEST(AudioVectorTest, OverwriteBeyondEnd) { EXPECT_EQ(vec.Size(), static_cast<size_t>(pos)); } -TYPED_TEST(AudioVectorTest, CrossFade) { +TEST_F(AudioVectorTest, CrossFade) { static const size_t kLength = 100; static const size_t kFadeLength = 10; - AudioVector<TypeParam> vec1(kLength); - AudioVector<TypeParam> vec2(kLength); + AudioVector vec1(kLength); + AudioVector vec2(kLength); // Set all vector elements to 0 in |vec1| and 100 in |vec2|. for (size_t i = 0; i < kLength; ++i) { vec1[i] = 0; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.cc index f30646670e6f..2dfb3c1f3924 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.cc @@ -38,7 +38,7 @@ void BackgroundNoise::Reset() { // Keep _bgnMode as it is. } -void BackgroundNoise::Update(const AudioMultiVector<int16_t>& input, +void BackgroundNoise::Update(const AudioMultiVector& input, const PostDecodeVad& vad) { if (vad.running() && vad.active_speech()) { // Do not update the background noise parameters if we know that the signal diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.h index 16848c57c646..ac5446bf7f67 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/background_noise.h @@ -14,6 +14,7 @@ #include <string.h> // size_t #include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h" +#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" #include "webrtc/system_wrappers/interface/constructor_magic.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" #include "webrtc/typedefs.h" @@ -26,12 +27,6 @@ class PostDecodeVad; // This class handles estimation of background noise parameters. class BackgroundNoise { public: - enum BackgroundNoiseMode { - kBgnOn, // Default behavior with eternal noise. - kBgnFade, // Noise fades to zero after some time. - kBgnOff // Background noise is always zero. - }; - // TODO(hlundin): For 48 kHz support, increase kMaxLpcOrder to 10. // Will work anyway, but probably sound a little worse. static const int kMaxLpcOrder = 8; // 32000 / 8000 + 4. @@ -43,7 +38,7 @@ class BackgroundNoise { // Updates the parameter estimates based on the signal currently in the // |sync_buffer|, and on the latest decision in |vad| if it is running. - void Update(const AudioMultiVector<int16_t>& sync_buffer, + void Update(const AudioMultiVector& sync_buffer, const PostDecodeVad& vad); // Returns |energy_| for |channel|. @@ -73,7 +68,11 @@ class BackgroundNoise { // Accessors. bool initialized() const { return initialized_; } - BackgroundNoiseMode mode() const { return mode_; } + NetEqBackgroundNoiseMode mode() const { return mode_; } + + // Sets the mode of the background noise playout for cases when there is long + // duration of packet loss. + void set_mode(NetEqBackgroundNoiseMode mode) { mode_ = mode; } private: static const int kThresholdIncrement = 229; // 0.0035 in Q16.
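The background_noise hunks above replace the class-private BackgroundNoise::BackgroundNoiseMode enum with the public NetEqBackgroundNoiseMode from interface/neteq.h, and the new set_mode() accessor makes the playout policy for long packet loss selectable at runtime. A minimal client-side sketch, using only the NetEq::Create() factory and the enum declared elsewhere in this patch (error handling omitted):

#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"

void UseFadingBackgroundNoise() {
  webrtc::NetEq* neteq = webrtc::NetEq::Create(16000);  // 16 kHz instance.
  // kBgnFade: noise fades to zero during very long expands, instead of
  // playing forever (kBgnOn) or staying silent (kBgnOff).
  neteq->SetBackgroundNoiseMode(webrtc::kBgnFade);
  delete neteq;
}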
@@ -129,7 +128,7 @@ class BackgroundNoise { size_t num_channels_; scoped_array<ChannelParameters> channel_parameters_; bool initialized_; - BackgroundNoiseMode mode_; + NetEqBackgroundNoiseMode mode_; DISALLOW_COPY_AND_ASSIGN(BackgroundNoise); }; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.cc index a56d9677e141..360767af1931 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.cc @@ -50,7 +50,7 @@ int ComfortNoise::UpdateParameters(Packet* packet) { } int ComfortNoise::Generate(size_t requested_length, - AudioMultiVector<int16_t>* output) { + AudioMultiVector* output) { // TODO(hlundin): Change to an enumerator and skip assert. assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 || fs_hz_ == 48000); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.h index af0501f4b7a5..7e7c294ff0ee 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/comfort_noise.h @@ -53,7 +53,7 @@ class ComfortNoise { // |output|. If this is the first call after Reset (or first after creating // the object), it will also mix in comfort noise at the end of the // SyncBuffer object provided in the constructor. - int Generate(size_t requested_length, AudioMultiVector<int16_t>* output); + int Generate(size_t requested_length, AudioMultiVector* output); // Returns the last error code that was produced by the comfort noise // decoder. Returns 0 if no error has been encountered since the last reset. diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic.cc index 58accfd1b3f5..04b886a2e2e6 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic.cc @@ -128,7 +128,7 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer, const int cur_size_samples = samples_left + packet_buffer_.NumSamplesInBuffer(decoder_database_, decoder_frame_length); - NETEQ_LOG_VERBOSE << "Buffers: " << packet_buffer_.NumPacketsInBuffer() << + LOG(LS_VERBOSE) << "Buffers: " << packet_buffer_.NumPacketsInBuffer() << " packets * " << decoder_frame_length << " samples/packet + " << samples_left << " samples in sync buffer = " << cur_size_samples; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc index bd9a7c466a64..a70f23b7621f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc @@ -44,7 +44,7 @@ Operations DecisionLogicNormal::GetDecisionSpecialized( uint32_t target_timestamp = sync_buffer.end_timestamp(); uint32_t available_timestamp = 0; - int is_cng_packet = 0; + bool is_cng_packet = false; if (packet_header) { available_timestamp = packet_header->timestamp; is_cng_packet = @@ -70,8 +70,7 @@ Operations DecisionLogicNormal::GetDecisionSpecialized( // Check if the required packet is available.
if (target_timestamp == available_timestamp) { return ExpectedPacketAvailable(prev_mode, play_dtmf); - } else if (available_timestamp > target_timestamp) { - // TODO(hlundin): Consider wrap-around too? + } else if (IsNewerTimestamp(available_timestamp, target_timestamp)) { return FuturePacketAvailable(sync_buffer, expand, decoder_frame_length, prev_mode, target_timestamp, available_timestamp, play_dtmf); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/defines.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/defines.h index 67b7cde3e0ef..b6f9eb2bc145 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/defines.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/defines.h @@ -47,11 +47,5 @@ enum Modes { kModeUndefined = -1 }; -#ifdef NETEQ4_VERBOSE_LOGGING -#define NETEQ_LOG_VERBOSE LOG(LS_VERBOSE) -#else -#define NETEQ_LOG_VERBOSE while(false)LOG(LS_VERBOSE) -#endif - } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/delay_manager.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/delay_manager.cc index 63ed52506a7b..e80b9de51429 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/delay_manager.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/delay_manager.cc @@ -17,6 +17,7 @@ #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" #include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h" +#include "webrtc/modules/interface/module_common_types.h" #include "webrtc/system_wrappers/interface/logging.h" namespace webrtc { @@ -85,10 +86,9 @@ int DelayManager::Update(uint16_t sequence_number, } // Try calculating packet length from current and previous timestamps. - // TODO(hlundin): Take care of wrap-around. Not done yet due to legacy - // bit-exactness. int packet_len_ms; - if ((timestamp <= last_timestamp_) || (sequence_number <= last_seq_no_)) { + if (!IsNewerTimestamp(timestamp, last_timestamp_) || + !IsNewerSequenceNumber(sequence_number, last_seq_no_)) { // Wrong timestamp or sequence order; use stored value. packet_len_ms = packet_len_ms_; } else { @@ -111,18 +111,14 @@ } // Check for discontinuous packet sequence and re-ordering. - if (sequence_number > last_seq_no_ + 1) { - // TODO(hlundin): Take care of wrap-around. Not done yet due to legacy - // bit-exactness. + if (IsNewerSequenceNumber(sequence_number, last_seq_no_ + 1)) { // Compensate for gap in the sequence numbers. Reduce IAT with the // expected extra time due to lost packets, but ensure that the IAT is // not negative. - iat_packets -= sequence_number - last_seq_no_ - 1; + iat_packets -= static_cast<int>(sequence_number - last_seq_no_ - 1); iat_packets = std::max(iat_packets, 0); - } else if (sequence_number < last_seq_no_) { - // TODO(hlundin): Take care of wrap-around. - // Compensate for re-ordering. - iat_packets += last_seq_no_ + 1 - sequence_number; + } else if (!IsNewerSequenceNumber(sequence_number, last_seq_no_)) { + iat_packets += static_cast<int>(last_seq_no_ + 1 - sequence_number); } // Saturate IAT at maximum value.
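The delay_manager changes above drop the naive comparisons (and their wrap-around TODOs) in favor of IsNewerTimestamp() and IsNewerSequenceNumber() from webrtc/modules/interface/module_common_types.h, which decision_logic_normal.cc now uses as well. These helpers implement the standard serial-number comparison; the following is a sketch of the idea, not necessarily the exact upstream definitions:

#include <stdint.h>

// |value| is "newer" than |prev_value| if it is ahead by less than half the
// range of the counter, so 0x0001 correctly follows 0xFFFF across the wrap.
inline bool IsNewerSequenceNumber(uint16_t value, uint16_t prev_value) {
  return value != prev_value &&
         static_cast<uint16_t>(value - prev_value) < 0x8000;
}

inline bool IsNewerTimestamp(uint32_t value, uint32_t prev_value) {
  return value != prev_value &&
         static_cast<uint32_t>(value - prev_value) < 0x80000000u;
}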
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.cc index 7493e10c5ce6..e1aa0e53de70 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.cc @@ -80,7 +80,7 @@ int DspHelper::RampSignal(int16_t* signal, return RampSignal(signal, length, factor, increment, signal); } -int DspHelper::RampSignal(AudioMultiVector<int16_t>* signal, +int DspHelper::RampSignal(AudioMultiVector* signal, size_t start_index, size_t length, int factor, diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.h index 9df6fd366dff..60cd995d8404 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper.h @@ -67,7 +67,7 @@ class DspHelper { // Same as above, but processes |length| samples from |signal|, starting at // |start_index|. - static int RampSignal(AudioMultiVector<int16_t>* signal, + static int RampSignal(AudioMultiVector* signal, size_t start_index, size_t length, int factor, diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc index d3c76dfe2064..852c2ec927cd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc @@ -48,7 +48,7 @@ TEST(DspHelper, RampSignalArray) { TEST(DspHelper, RampSignalAudioMultiVector) { static const int kLen = 100; static const int kChannels = 5; - AudioMultiVector<int16_t> input(kChannels, kLen * 3); + AudioMultiVector input(kChannels, kLen * 3); // Fill input with 1000. for (int i = 0; i < kLen * 3; ++i) { for (int channel = 0; channel < kChannels; ++channel) { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc index f8b13aa25ebe..c85534e9b7fe 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc @@ -150,7 +150,7 @@ void DtmfToneGenerator::Reset() { // Generate num_samples of DTMF signal and write to |output|.
int DtmfToneGenerator::Generate(int num_samples, - AudioMultiVector<int16_t>* output) { + AudioMultiVector* output) { if (!initialized_) { return kNotInitialized; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h index 60500ec60725..e93f0b883f50 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h @@ -31,7 +31,7 @@ class DtmfToneGenerator { virtual ~DtmfToneGenerator() {} virtual int Init(int fs, int event, int attenuation); virtual void Reset(); - virtual int Generate(int num_samples, AudioMultiVector<int16_t>* output); + virtual int Generate(int num_samples, AudioMultiVector* output); virtual bool initialized() const { return initialized_; } private: diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc index 393648c499f2..37e8bbda96c3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc @@ -27,7 +27,7 @@ TEST(DtmfToneGenerator, CreateAndDestroy) { TEST(DtmfToneGenerator, TestErrors) { DtmfToneGenerator tone_gen; const int kNumSamples = 10; - AudioMultiVector<int16_t> signal(1); // One channel. + AudioMultiVector signal(1); // One channel. // Try to generate tones without initializing. EXPECT_EQ(DtmfToneGenerator::kNotInitialized, @@ -62,7 +62,7 @@ TEST(DtmfToneGenerator, TestTones) { DtmfToneGenerator tone_gen; const int kAttenuation = 0; const int kNumSamples = 10; - AudioMultiVector<int16_t> signal(1); // One channel. + AudioMultiVector signal(1); // One channel. // Low and high frequencies for events 0 through 15. const double low_freq_hz[] = { 941.0, 697.0, 697.0, 697.0, 770.0, 770.0, @@ -106,8 +106,8 @@ TEST(DtmfToneGenerator, TestTones) { TEST(DtmfToneGenerator, TestAmplitudes) { DtmfToneGenerator tone_gen; const int kNumSamples = 10; - AudioMultiVector<int16_t> signal(1); // One channel. - AudioMultiVector<int16_t> ref_signal(1); // One channel. + AudioMultiVector signal(1); // One channel. + AudioMultiVector ref_signal(1); // One channel. const int fs_vec[] = { 8000, 16000, 32000, 48000 }; const int event_vec[] = { 0, 4, 9, 13 }; // Test a few events. diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.cc index 2d6dfb5c1ae0..cba99243da8a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.cc @@ -34,7 +34,7 @@ void Expand::Reset() { } } -int Expand::Process(AudioMultiVector<int16_t>* output) { +int Expand::Process(AudioMultiVector* output) { int16_t random_vector[kMaxSampleRate / 8000 * 120 + 30]; int16_t scaled_random_vector[kMaxSampleRate / 8000 * 125]; static const int kTempDataSize = 3600; @@ -294,8 +294,8 @@ int Expand::Process(AudioMultiVector* output) { // Unmute the background noise. int16_t bgn_mute_factor = background_noise_->MuteFactor(channel_ix); - BackgroundNoise::BackgroundNoiseMode bgn_mode = background_noise_->mode(); - if (bgn_mode == BackgroundNoise::kBgnFade && + NetEqBackgroundNoiseMode bgn_mode = background_noise_->mode(); + if (bgn_mode == kBgnFade && consecutive_expands_ >= kMaxConsecutiveExpands && bgn_mute_factor > 0) { // Fade BGN to zero.
@@ -317,8 +317,8 @@ int Expand::Process(AudioMultiVector<int16_t>* output) { } else if (bgn_mute_factor < 16384) { // If mode is kBgnOff, or if kBgnFade has started fading, // Use regular |mute_slope|. - if (!stop_muting_ && bgn_mode != BackgroundNoise::kBgnOff && - !(bgn_mode == BackgroundNoise::kBgnFade && + if (!stop_muting_ && bgn_mode != kBgnOff && + !(bgn_mode == kBgnFade && consecutive_expands_ >= kMaxConsecutiveExpands)) { DspHelper::UnmuteSignal(noise_vector, static_cast<int>(current_lag), &bgn_mute_factor, parameters.mute_slope, @@ -415,9 +415,11 @@ void Expand::AnalyzeSignal(int16_t* random_vector) { // Calculate correlation in downsampled domain (4 kHz sample rate). int16_t correlation_scale; - int correlation_length = Correlation(audio_history, signal_length, - correlation_vector, &correlation_scale); - correlation_length = 51; // TODO(hlundin): Legacy bit-exactness. + int correlation_length = 51; // TODO(hlundin): Legacy bit-exactness. + // If it is decided to break bit-exactness |correlation_length| should be + // initialized to the return value of Correlation(). + Correlation(audio_history, signal_length, correlation_vector, + &correlation_scale); // Find peaks in correlation vector. DspHelper::PeakDetection(correlation_vector, correlation_length, @@ -449,7 +451,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) { // Find the maximizing index |i| of the cost function // f[i] = best_correlation[i] / best_distortion[i]. - int32_t best_ratio = -1; + int32_t best_ratio = std::numeric_limits<int32_t>::min(); int best_index = -1; for (int i = 0; i < kNumCorrelationCandidates; ++i) { int32_t ratio; @@ -862,4 +864,14 @@ void Expand::UpdateLagIndex() { } } +Expand* ExpandFactory::Create(BackgroundNoise* background_noise, + SyncBuffer* sync_buffer, + RandomVector* random_vector, + int fs, + size_t num_channels) const { + return new Expand(background_noise, sync_buffer, random_vector, fs, + num_channels); +} + + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.h index b51856fe9dfb..4de8d7c55ba0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand.h @@ -61,7 +61,7 @@ class Expand { // The main method to produce concealment data. The data is appended to the // end of |output|. - int Process(AudioMultiVector<int16_t>* output); + int Process(AudioMultiVector* output); // Prepare the object to do extra expansion during normal operation following // a period of expands.
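ExpandFactory::Create() above is the seam through which NetEqImpl now builds its Expand object (see SetSampleRateAndChannels() later in this patch), so tests and embedders can substitute their own variant; the struct itself is declared in the expand.h hunk that follows. A sketch of how the seam could be used, with CountingExpandFactory as a hypothetical example that is not part of the patch:

#include "webrtc/modules/audio_coding/neteq4/expand.h"

// Returns stock Expand objects but counts how often NetEq re-creates them
// (for example on sample-rate or channel-count changes).
struct CountingExpandFactory : public webrtc::ExpandFactory {
  CountingExpandFactory() : create_calls(0) {}
  virtual webrtc::Expand* Create(webrtc::BackgroundNoise* background_noise,
                                 webrtc::SyncBuffer* sync_buffer,
                                 webrtc::RandomVector* random_vector,
                                 int fs,
                                 size_t num_channels) const {
    ++create_calls;
    return webrtc::ExpandFactory::Create(background_noise, sync_buffer,
                                         random_vector, fs, num_channels);
  }
  mutable int create_calls;  // Mutable because Create() is declared const.
};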
@@ -116,8 +116,8 @@ class Expand { int16_t ar_gain_scale; int16_t voice_mix_factor; /* Q14 */ int16_t current_voice_mix_factor; /* Q14 */ - AudioVector<int16_t> expand_vector0; - AudioVector<int16_t> expand_vector1; + AudioVector expand_vector0; + AudioVector expand_vector1; bool onset; int16_t mute_slope; /* Q20 */ }; @@ -139,9 +139,9 @@ SyncBuffer* sync_buffer_; RandomVector* random_vector_; bool first_expand_; - int fs_hz_; - size_t num_channels_; - size_t overlap_length_; + const int fs_hz_; + const size_t num_channels_; + const size_t overlap_length_; int consecutive_expands_; int16_t max_lag_; size_t expand_lags_[kNumLags]; @@ -153,5 +153,16 @@ DISALLOW_COPY_AND_ASSIGN(Expand); }; +struct ExpandFactory { + ExpandFactory() {} + virtual ~ExpandFactory() {} + + virtual Expand* Create(BackgroundNoise* background_noise, + SyncBuffer* sync_buffer, + RandomVector* random_vector, + int fs, + size_t num_channels) const; +}; + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand_unittest.cc index a63ed142f03f..353af2cf4ee7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/expand_unittest.cc @@ -28,6 +28,19 @@ TEST(Expand, CreateAndDestroy) { Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels); } +TEST(Expand, CreateUsingFactory) { + int fs = 8000; + size_t channels = 1; + BackgroundNoise bgn(channels); + SyncBuffer sync_buffer(1, 1000); + RandomVector random_vector; + ExpandFactory expand_factory; + Expand* expand = + expand_factory.Create(&bgn, &sync_buffer, &random_vector, fs, channels); + EXPECT_TRUE(expand != NULL); + delete expand; +} + // TODO(hlundin): Write more tests. } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h index f89f887f941e..f3bcc711f367 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h @@ -124,7 +124,7 @@ class AudioDecoder { // applicable (e.g., for RED and DTMF/AVT types). static AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type); - size_t channels() { return channels_; } + size_t channels() const { return channels_; } protected: static SpeechType ConvertSpeechType(int16_t type); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/neteq.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/neteq.h index 7c39cb1f4c97..617393093f3d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/neteq.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/interface/neteq.h @@ -15,6 +15,7 @@ #include +#include "webrtc/common_types.h" #include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h" #include "webrtc/system_wrappers/interface/constructor_magic.h" #include "webrtc/typedefs.h" @@ -24,14 +25,6 @@ namespace webrtc { // Forward declarations. struct WebRtcRTPHeader; -// RTCP statistics. -struct RtcpStatistics { - uint16_t fraction_lost; - uint32_t cumulative_lost; - uint32_t extended_max; - uint32_t jitter; -}; - struct NetEqNetworkStatistics { uint16_t current_buffer_size_ms; // Current jitter buffer size in ms.
uint16_t preferred_buffer_size_ms; // Target buffer size in ms. @@ -66,9 +59,9 @@ enum NetEqPlayoutMode { }; enum NetEqBackgroundNoiseMode { - kBgnOn, - kBgnFade, - kBgnOff + kBgnOn, // Default behavior with eternal noise. + kBgnFade, // Noise fades to zero after some time. + kBgnOff // Background noise is always zero. }; // This is the interface class for NetEq. @@ -105,7 +98,8 @@ class NetEq { kFrameSplitError, kRedundancySplitError, kPacketBufferCorruption, - kOversizePacket + kOversizePacket, + kSyncPacketNotAccepted }; static const int kMaxNumPacketsInBuffer = 240; // TODO(hlundin): Remove. @@ -127,6 +121,18 @@ class NetEq { int length_bytes, uint32_t receive_timestamp) = 0; + // Inserts a sync-packet into the packet queue. Sync-packets are decoded to + // silence and are intended to keep AV-sync intact in the event of long packet + // losses when Video NACK is enabled but Audio NACK is not. Clients of NetEq + // might insert sync-packets when they observe that the buffer level of NetEq + // is decreasing below a certain threshold, defined by the application. + // Sync-packets should have the same payload type as the last audio payload + // type, i.e. they cannot have DTMF or CNG payload type, nor can a codec + // change be implied by inserting a sync-packet. + // Returns kOk on success, kFail on failure. + virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header, + uint32_t receive_timestamp) = 0; + // Instructs NetEq to deliver 10 ms of audio data. The data is written to // |output_audio|, which can hold (at least) |max_length| elements. // The number of channels that were written to the output is provided in @@ -241,14 +247,13 @@ class NetEq { // Get sequence number and timestamp of the latest RTP. // This method is to facilitate NACK. - virtual int DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) = 0; - - // Not implemented. - virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header, - uint32_t receive_timestamp) = 0; + virtual int DecodedRtpInfo(int* sequence_number, + uint32_t* timestamp) const = 0; + // Sets the background noise mode. virtual void SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode) = 0; + // Gets the background noise mode. virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const = 0; protected: diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.cc index c3c8b48eaaef..463b2ca784c7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.cc @@ -25,7 +25,7 @@ namespace webrtc { int Merge::Process(int16_t* input, size_t input_length, int16_t* external_mute_factor_array, - AudioMultiVector<int16_t>* output) { + AudioMultiVector* output) { // TODO(hlundin): Change to an enumerator and skip assert. assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 || fs_hz_ == 48000); @@ -37,7 +37,7 @@ int Merge::Process(int16_t* input, size_t input_length, int expanded_length = GetExpandedSignal(&old_length, &expand_period); // Transfer input signal to an AudioMultiVector. - AudioMultiVector<int16_t> input_vector(num_channels_); + AudioMultiVector input_vector(num_channels_); input_vector.PushBackInterleaved(input, input_length); size_t input_length_per_channel = input_vector.Size(); assert(input_length_per_channel == input_length / num_channels_); @@ -162,7 +162,7 @@ int Merge::GetExpandedSignal(int* old_length, int* expand_period) { // This assert should always be true thanks to the if statement above.
assert(210 * kMaxSampleRate / 8000 - *old_length >= 0); - AudioMultiVector<int16_t> expanded_temp(num_channels_); + AudioMultiVector expanded_temp(num_channels_); expand_->Process(&expanded_temp); *expand_period = static_cast<int>(expanded_temp.Size()); // Samples per // channel. diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.h index bb81e20bb7ad..f1f64e6c538c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/merge.h @@ -53,7 +53,7 @@ class Merge { // must have |num_channels_| elements. int Process(int16_t* input, size_t input_length, int16_t* external_mute_factor_array, - AudioMultiVector<int16_t>* output); + AudioMultiVector* output); private: static const int kMaxSampleRate = 48000; @@ -95,7 +95,7 @@ class Merge { SyncBuffer* sync_buffer_; int16_t expanded_downsampled_[kExpandDownsampLength]; int16_t input_downsampled_[kInputDownsampLength]; - AudioMultiVector<int16_t> expanded_; + AudioMultiVector expanded_; DISALLOW_COPY_AND_ASSIGN(Merge); }; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h index f8ab56f365a5..d34f7470ef28 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h @@ -26,7 +26,7 @@ class MockDtmfToneGenerator : public DtmfToneGenerator { MOCK_METHOD0(Reset, void()); MOCK_METHOD2(Generate, - int(int num_samples, AudioMultiVector<int16_t>* output)); + int(int num_samples, AudioMultiVector* output)); MOCK_CONST_METHOD0(initialized, bool()); }; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.cc index 1ec71a2a6fb9..a64f01b2565b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.cc @@ -10,15 +10,18 @@ #include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" +#include "webrtc/modules/audio_coding/neteq4/accelerate.h" #include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h" #include "webrtc/modules/audio_coding/neteq4/decoder_database.h" #include "webrtc/modules/audio_coding/neteq4/delay_manager.h" #include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h" #include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h" #include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h" +#include "webrtc/modules/audio_coding/neteq4/expand.h" #include "webrtc/modules/audio_coding/neteq4/neteq_impl.h" #include "webrtc/modules/audio_coding/neteq4/packet_buffer.h" #include "webrtc/modules/audio_coding/neteq4/payload_splitter.h" +#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h" #include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h" namespace webrtc { @@ -37,6 +40,10 @@ NetEq* NetEq::Create(int sample_rate_hz) { kMaxBytesInBuffer); PayloadSplitter* payload_splitter = new PayloadSplitter; TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database); + AccelerateFactory* accelerate_factory = new AccelerateFactory; + ExpandFactory* expand_factory = new ExpandFactory; + PreemptiveExpandFactory* preemptive_expand_factory = + new PreemptiveExpandFactory; return new NetEqImpl(sample_rate_hz, buffer_level_filter, decoder_database, @@ -46,7 +53,10 @@
NetEq* NetEq::Create(int sample_rate_hz) { dtmf_tone_generator, packet_buffer, payload_splitter, - timestamp_scaler); + timestamp_scaler, + accelerate_factory, + expand_factory, + preemptive_expand_factory); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.gypi index 1f6af7820dcf..87aaacf01d90 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq.gypi @@ -47,10 +47,12 @@ ], 'include_dirs': [ 'interface', + '<(webrtc_root)', ], 'direct_dependent_settings': { 'include_dirs': [ 'interface', + '<(webrtc_root)', ], }, 'sources': [ @@ -169,7 +171,7 @@ 'dependencies': [ '<(DEPTH)/testing/gmock.gyp:gmock', '<(DEPTH)/testing/gtest.gyp:gtest', - '<(webrtc_root)/test/test.gyp:test_support_main', + 'PCM16B', # Needed by neteq_performance_test. ], 'direct_dependent_settings': { 'include_dirs': [ @@ -184,6 +186,8 @@ 'tools/audio_loop.h', 'tools/input_audio_file.cc', 'tools/input_audio_file.h', + 'tools/neteq_performance_test.cc', + 'tools/neteq_performance_test.h', 'tools/rtp_generator.cc', 'tools/rtp_generator.h', ], @@ -209,10 +213,10 @@ 'target_name': 'audio_decoder_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'audio_decoder_unittests', ], 'includes': [ + '../../../build/isolate.gypi', 'audio_decoder_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.cc index 7620a7f4dc59..d6fce18cc0a0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.cc @@ -58,9 +58,11 @@ NetEqImpl::NetEqImpl(int fs, DtmfToneGenerator* dtmf_tone_generator, PacketBuffer* packet_buffer, PayloadSplitter* payload_splitter, - TimestampScaler* timestamp_scaler) - : background_noise_(NULL), - buffer_level_filter_(buffer_level_filter), + TimestampScaler* timestamp_scaler, + AccelerateFactory* accelerate_factory, + ExpandFactory* expand_factory, + PreemptiveExpandFactory* preemptive_expand_factory) + : buffer_level_filter_(buffer_level_filter), decoder_database_(decoder_database), delay_manager_(delay_manager), delay_peak_detector_(delay_peak_detector), @@ -70,14 +72,9 @@ NetEqImpl::NetEqImpl(int fs, payload_splitter_(payload_splitter), timestamp_scaler_(timestamp_scaler), vad_(new PostDecodeVad()), - algorithm_buffer_(NULL), - sync_buffer_(NULL), - expand_(NULL), - normal_(NULL), - merge_(NULL), - accelerate_(NULL), - preemptive_expand_(NULL), - comfort_noise_(NULL), + expand_factory_(expand_factory), + accelerate_factory_(accelerate_factory), + preemptive_expand_factory_(preemptive_expand_factory), last_mode_(kModeNormal), mute_factor_array_(NULL), decoded_buffer_length_(kMaxFrameSize), @@ -124,13 +121,34 @@ int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header, int length_bytes, uint32_t receive_timestamp) { CriticalSectionScoped lock(crit_sect_.get()); - NETEQ_LOG_VERBOSE << "InsertPacket: ts=" << rtp_header.header.timestamp << + LOG(LS_VERBOSE) << "InsertPacket: ts=" << rtp_header.header.timestamp << ", sn=" << rtp_header.header.sequenceNumber << ", pt=" << static_cast<int>(rtp_header.header.payloadType) << ", ssrc=" << rtp_header.header.ssrc << ", len=" << length_bytes; int error = InsertPacketInternal(rtp_header, payload, length_bytes, -
receive_timestamp); + receive_timestamp, false); if (error != 0) { LOG_FERR1(LS_WARNING, InsertPacketInternal, error); error_code_ = error; return kFail; } return kOK; } + +int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header, + uint32_t receive_timestamp) { + CriticalSectionScoped lock(crit_sect_.get()); + LOG(LS_VERBOSE) << "InsertPacket-Sync: ts=" + << rtp_header.header.timestamp << + ", sn=" << rtp_header.header.sequenceNumber << + ", pt=" << static_cast<int>(rtp_header.header.payloadType) << + ", ssrc=" << rtp_header.header.ssrc; + + const uint8_t kSyncPayload[] = { 's', 'y', 'n', 'c' }; + int error = InsertPacketInternal( + rtp_header, kSyncPayload, sizeof(kSyncPayload), receive_timestamp, true); + + if (error != 0) { LOG_FERR1(LS_WARNING, InsertPacketInternal, error); error_code_ = error; @@ -143,10 +161,10 @@ int NetEqImpl::GetAudio(size_t max_length, int16_t* output_audio, int* samples_per_channel, int* num_channels, NetEqOutputType* type) { CriticalSectionScoped lock(crit_sect_.get()); - NETEQ_LOG_VERBOSE << "GetAudio"; + LOG(LS_VERBOSE) << "GetAudio"; int error = GetAudioInternal(max_length, output_audio, samples_per_channel, num_channels); - NETEQ_LOG_VERBOSE << "Produced " << *samples_per_channel << + LOG(LS_VERBOSE) << "Produced " << *samples_per_channel << " samples/channel for " << *num_channels << " channel(s)"; if (error != 0) { LOG_FERR1(LS_WARNING, GetAudioInternal, error); @@ -363,7 +381,7 @@ void NetEqImpl::PacketBufferStatistics(int* current_num_packets, current_memory_size_bytes, max_memory_size_bytes); } -int NetEqImpl::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) { +int NetEqImpl::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const { CriticalSectionScoped lock(crit_sect_.get()); if (decoded_packet_sequence_number_ < 0) return -1; @@ -372,28 +390,48 @@ int NetEqImpl::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) { return 0; } -int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& /* rtp_header */, - uint32_t /* receive_timestamp */) { - return kNotImplemented; +void NetEqImpl::SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode) { + CriticalSectionScoped lock(crit_sect_.get()); + assert(background_noise_.get()); + background_noise_->set_mode(mode); } -void NetEqImpl::SetBackgroundNoiseMode(NetEqBackgroundNoiseMode /* mode */) {} - NetEqBackgroundNoiseMode NetEqImpl::BackgroundNoiseMode() const { - return kBgnOn; + CriticalSectionScoped lock(crit_sect_.get()); + assert(background_noise_.get()); + return background_noise_->mode(); } // Methods below this line are private. - int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, const uint8_t* payload, int length_bytes, - uint32_t receive_timestamp) { + uint32_t receive_timestamp, + bool is_sync_packet) { if (!payload) { LOG_F(LS_ERROR) << "payload == NULL"; return kInvalidPointer; } + // Sanity checks for sync-packets. + if (is_sync_packet) { + if (decoder_database_->IsDtmf(rtp_header.header.payloadType) || + decoder_database_->IsRed(rtp_header.header.payloadType) || + decoder_database_->IsComfortNoise(rtp_header.header.payloadType)) { + LOG_F(LS_ERROR) << "Sync-packet with an unacceptable payload type " + << rtp_header.header.payloadType; + return kSyncPacketNotAccepted; + } + if (first_packet_ || + rtp_header.header.payloadType != current_rtp_payload_type_ || + rtp_header.header.ssrc != ssrc_) { + // Even if |current_rtp_payload_type_| is 0xFF, sync-packet isn't + // accepted.
+ LOG_F(LS_ERROR) << "Changing codec, SSRC or first packet " "with sync-packet."; + return kSyncPacketNotAccepted; + } + } PacketList packet_list; RTPHeader main_header; { @@ -412,6 +450,7 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, packet->primary = true; packet->waiting_time = 0; packet->payload = new uint8_t[packet->payload_length]; + packet->sync_packet = is_sync_packet; if (!packet->payload) { LOG_F(LS_ERROR) << "Payload pointer is NULL."; } @@ -423,6 +462,7 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, memcpy(&main_header, &packet->header, sizeof(main_header)); } + bool update_sample_rate_and_channels = false; // Reinitialize NetEq if it's needed (changed SSRC or first call). if ((main_header.ssrc != ssrc_) || first_packet_) { rtcp_.Init(main_header.sequenceNumber); @@ -447,13 +487,18 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, // Reset timestamp scaling. timestamp_scaler_->Reset(); + + // Trigger an update of sampling rate and the number of channels. + update_sample_rate_and_channels = true; } - // Update RTCP statistics. - rtcp_.Update(main_header, receive_timestamp); + // Update RTCP statistics, only for regular packets. + if (!is_sync_packet) + rtcp_.Update(main_header, receive_timestamp); // Check for RED payload type, and separate payloads into several packets. if (decoder_database_->IsRed(main_header.payloadType)) { + assert(!is_sync_packet); // We had a sanity check for this. if (payload_splitter_->SplitRed(&packet_list) != PayloadSplitter::kOK) { LOG_FERR1(LS_WARNING, SplitRed, packet_list.size()); PacketBuffer::DeleteAllPackets(&packet_list); @@ -486,6 +531,7 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, assert(current_packet); assert(current_packet->payload); if (decoder_database_->IsDtmf(current_packet->header.payloadType)) { + assert(!current_packet->sync_packet); // We had a sanity check for this. DtmfEvent event; int ret = DtmfBuffer::ParseEvent( current_packet->header.timestamp, @@ -513,7 +559,8 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, } // Split payloads into smaller chunks. This also verifies that all payloads - // are of a known payload type. + // are of a known payload type. SplitAudio() method is protected against + // sync-packets. int ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_); if (ret != PayloadSplitter::kOK) { LOG_FERR1(LS_WARNING, SplitAudio, packet_list.size()); @@ -528,8 +575,8 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, } } - // Update bandwidth estimate. - if (!packet_list.empty()) { + // Update bandwidth estimate, if the packet is not sync-packet. + if (!packet_list.empty() && !packet_list.front()->sync_packet) { // The list can be empty here if we got nothing but DTMF payloads. AudioDecoder* decoder = decoder_database_->GetDecoder(main_header.payloadType); @@ -552,6 +599,7 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, if (ret == PacketBuffer::kFlushed) { // Reset DSP timestamp etc. if packet buffer flushed.
new_codec_ = true; + update_sample_rate_and_channels = true; LOG_F(LS_WARNING) << "Packet buffer flushed"; } else if (ret == PacketBuffer::kOversizePacket) { LOG_F(LS_WARNING) << "Packet larger than packet buffer"; @@ -569,6 +617,26 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header, } } + if (update_sample_rate_and_channels && !packet_buffer_->Empty()) { + // We do not use |current_rtp_payload_type_| to set |payload_type|, but + // get the next RTP header from |packet_buffer_| to obtain the payload type. + // The reason for it is the following corner case. If NetEq receives a + // CNG packet with a sample rate different than the current CNG then it + // flushes its buffer, assuming send codec must have been changed. However, + // payload type of the hypothetically new send codec is not known. + const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader(); + assert(rtp_header); + int payload_type = rtp_header->payloadType; + AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type); + assert(decoder); // Payloads are already checked to be valid. + const DecoderDatabase::DecoderInfo* decoder_info = + decoder_database_->GetDecoderInfo(payload_type); + assert(decoder_info); + if (decoder_info->fs_hz != fs_hz_ || + decoder->channels() != algorithm_buffer_->Channels()) + SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels()); + } + // TODO(hlundin): Move this code to DelayManager class. const DecoderDatabase::DecoderInfo* dec_info = decoder_database_->GetDecoderInfo(main_header.payloadType); @@ -617,7 +685,7 @@ int NetEqImpl::GetAudioInternal(size_t max_length, int16_t* output, last_mode_ = kModeError; return return_value; } - NETEQ_LOG_VERBOSE << "GetDecision returned operation=" << operation << + LOG(LS_VERBOSE) << "GetDecision returned operation=" << operation << " and " << packet_list.size() << " packet(s)"; AudioDecoder::SpeechType speech_type; @@ -729,7 +797,7 @@ int NetEqImpl::GetAudioInternal(size_t max_length, int16_t* output, sync_buffer_->GetNextAudioInterleaved(num_output_samples_per_channel, output)); *num_channels = static_cast<int>(sync_buffer_->Channels()); - NETEQ_LOG_VERBOSE << "Sync buffer (" << *num_channels << " channel(s)):" << + LOG(LS_VERBOSE) << "Sync buffer (" << *num_channels << " channel(s)):" << " insert " << algorithm_buffer_->Size() << " samples, extract " << samples_from_sync << " samples"; if (samples_from_sync != output_size_samples_) { @@ -1064,7 +1132,14 @@ int NetEqImpl::Decode(PacketList* packet_list, Operations* operation, PacketBuffer::DeleteAllPackets(packet_list); return kDecoderNotFound; } - SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels()); + // We should have correct sampling rate and number of channels. They + // are set when packets are inserted. + if (decoder_info->fs_hz != fs_hz_ || + decoder->channels() != algorithm_buffer_->Channels()) { + LOG_F(LS_ERROR) << "Sampling rate or number of channels mismatch."; + assert(false); + SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels()); + } sync_buffer_->set_end_timestamp(timestamp_); playout_timestamp_ = timestamp_; } @@ -1154,9 +1229,20 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation, packet_list->pop_front(); int payload_length = packet->payload_length; int16_t decode_length; - if (!packet->primary) { + if (packet->sync_packet) { + // Decode to silence with the same frame size as the last decode.
+ LOG(LS_VERBOSE) << "Decoding sync-packet: " << + " ts=" << packet->header.timestamp << + ", sn=" << packet->header.sequenceNumber << + ", pt=" << static_cast<int>(packet->header.payloadType) << + ", ssrc=" << packet->header.ssrc << + ", len=" << packet->payload_length; + memset(&decoded_buffer_[*decoded_length], 0, decoder_frame_length_ * + decoder->channels() * sizeof(decoded_buffer_[0])); + decode_length = decoder_frame_length_; + } else if (!packet->primary) { // This is a redundant payload; call the special decoder method. - NETEQ_LOG_VERBOSE << "Decoding packet (redundant):" << + LOG(LS_VERBOSE) << "Decoding packet (redundant):" << " ts=" << packet->header.timestamp << ", sn=" << packet->header.sequenceNumber << ", pt=" << static_cast<int>(packet->header.payloadType) << @@ -1166,7 +1252,7 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation, packet->payload, packet->payload_length, &decoded_buffer_[*decoded_length], speech_type); } else { - NETEQ_LOG_VERBOSE << "Decoding packet: ts=" << packet->header.timestamp << + LOG(LS_VERBOSE) << "Decoding packet: ts=" << packet->header.timestamp << ", sn=" << packet->header.sequenceNumber << ", pt=" << static_cast<int>(packet->header.payloadType) << ", ssrc=" << packet->header.ssrc << @@ -1179,12 +1265,13 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation, delete[] packet->payload; delete packet; + packet = NULL; if (decode_length > 0) { *decoded_length += decode_length; // Update |decoder_frame_length_| with number of samples per channel. decoder_frame_length_ = decode_length / static_cast<int>(decoder->channels()); - NETEQ_LOG_VERBOSE << "Decoded " << decode_length << " samples (" << + LOG(LS_VERBOSE) << "Decoded " << decode_length << " samples (" << decoder->channels() << " channel(s) -> " << decoder_frame_length_ << " samples per channel)"; } else if (decode_length < 0) { @@ -1207,10 +1294,10 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation, } } // End of decode loop. - // If the list is not empty at this point, it must hold exactly one CNG - // packet. - assert(packet_list->empty() || - (packet_list->size() == 1 && + // If the list is not empty at this point, either a decoding error terminated + // the while-loop, or list must hold exactly one CNG packet. + assert(packet_list->empty() || *decoded_length < 0 || + (packet_list->size() == 1 && packet && decoder_database_->IsComfortNoise(packet->header.payloadType))); return 0; } @@ -1628,7 +1715,7 @@ int NetEqImpl::DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels, overdub_length = output_size_samples_ - static_cast<int>(out_index); } - AudioMultiVector<int16_t> dtmf_output(num_channels); + AudioMultiVector dtmf_output(num_channels); int dtmf_return_value = 0; if (!dtmf_tone_generator_->initialized()) { dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no, @@ -1690,8 +1777,8 @@ int NetEqImpl::ExtractPackets(int required_samples, PacketList* packet_list) { AudioDecoder* decoder = decoder_database_->GetDecoder( packet->header.payloadType); if (decoder) { - packet_duration = decoder->PacketDuration(packet->payload, - packet->payload_length); + packet_duration = packet->sync_packet ?
decoder_frame_length_ : + decoder->PacketDuration(packet->payload, packet->payload_length); } else { LOG_FERR1(LS_WARNING, GetDecoder, packet->header.payloadType) << "Could not find a decoder for a packet about to be extracted."; @@ -1754,20 +1841,27 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) { vad_->Init(); // Delete algorithm buffer and create a new one. - algorithm_buffer_.reset(new AudioMultiVector<int16_t>(channels)); + algorithm_buffer_.reset(new AudioMultiVector(channels)); // Delete sync buffer and create a new one. sync_buffer_.reset(new SyncBuffer(channels, kSyncBufferSize * fs_mult_)); - // Delete BackgroundNoise object and create a new one. + + // Delete BackgroundNoise object and create a new one, while preserving its + // mode. + NetEqBackgroundNoiseMode current_mode = kBgnOn; + if (background_noise_.get()) + current_mode = background_noise_->mode(); background_noise_.reset(new BackgroundNoise(channels)); + background_noise_->set_mode(current_mode); // Reset random vector. random_vector_.Reset(); // Delete Expand object and create a new one. - expand_.reset(new Expand(background_noise_.get(), sync_buffer_.get(), - &random_vector_, fs_hz, channels)); + expand_.reset(expand_factory_->Create(background_noise_.get(), + sync_buffer_.get(), &random_vector_, + fs_hz, channels)); // Move index so that we create a small set of future samples (all 0). sync_buffer_->set_next_index(sync_buffer_->next_index() - expand_->overlap_length()); @@ -1775,9 +1869,10 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) { normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_, expand_.get())); merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get())); - accelerate_.reset(new Accelerate(fs_hz, channels, *background_noise_)); - preemptive_expand_.reset(new PreemptiveExpand(fs_hz, channels, - *background_noise_)); + accelerate_.reset( + accelerate_factory_->Create(fs_hz, channels, *background_noise_)); + preemptive_expand_.reset( + preemptive_expand_factory_->Create(fs_hz, channels, *background_noise_)); // Delete ComfortNoise object and create a new one. comfort_noise_.reset(new ComfortNoise(fs_hz, decoder_database_.get(), @@ -1800,13 +1895,13 @@ NetEqOutputType NetEqImpl::LastOutputType() { assert(expand_.get()); if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) { return kOutputCNG; - } else if (vad_->running() && !vad_->active_speech()) { - return kOutputVADPassive; } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) { // Expand mode has faded down to background noise only (very long expand).
return kOutputPLCtoCNG; } else if (last_mode_ == kModeExpand) { return kOutputPLC; + } else if (vad_->running() && !vad_->active_speech()) { + return kOutputVADPassive; } else { return kOutputNormal; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.h index 60e644fe9f3e..c17ff1e29c7f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl.h @@ -48,7 +48,10 @@ class PreemptiveExpand; class RandomVector; class SyncBuffer; class TimestampScaler; +struct AccelerateFactory; struct DtmfEvent; +struct ExpandFactory; +struct PreemptiveExpandFactory; class NetEqImpl : public webrtc::NetEq { public: @@ -63,7 +66,10 @@ class NetEqImpl : public webrtc::NetEq { DtmfToneGenerator* dtmf_tone_generator, PacketBuffer* packet_buffer, PayloadSplitter* payload_splitter, - TimestampScaler* timestamp_scaler); + TimestampScaler* timestamp_scaler, + AccelerateFactory* accelerate_factory, + ExpandFactory* expand_factory, + PreemptiveExpandFactory* preemptive_expand_factory); virtual ~NetEqImpl(); @@ -76,6 +82,18 @@ class NetEqImpl : public webrtc::NetEq { int length_bytes, uint32_t receive_timestamp); + // Inserts a sync-packet into packet queue. Sync-packets are decoded to + // silence and are intended to keep AV-sync intact in an event of long packet + // losses when Video NACK is enabled but Audio NACK is not. Clients of NetEq + // might insert sync-packet when they observe that buffer level of NetEq is + // decreasing below a certain threshold, defined by the application. + // Sync-packets should have the same payload type as the last audio payload + // type, i.e. they cannot have DTMF or CNG payload type, nor a codec change + // can be implied by inserting a sync-packet. + // Returns kOk on success, kFail on failure. + virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header, + uint32_t receive_timestamp); + // Instructs NetEq to deliver 10 ms of audio data. The data is written to // |output_audio|, which can hold (at least) |max_length| elements. // The number of channels that were written to the output is provided in @@ -173,13 +191,12 @@ class NetEqImpl : public webrtc::NetEq { // Get sequence number and timestamp of the latest RTP. // This method is to facilitate NACK. - virtual int DecodedRtpInfo(int* sequence_number, uint32_t* timestamp); - - virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header, - uint32_t receive_timestamp); + virtual int DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const; + // Sets background noise mode. virtual void SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode); + // Gets background noise mode. virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const; private: @@ -194,7 +211,8 @@ class NetEqImpl : public webrtc::NetEq { int InsertPacketInternal(const WebRtcRTPHeader& rtp_header, const uint8_t* payload, int length_bytes, - uint32_t receive_timestamp); + uint32_t receive_timestamp, + bool is_sync_packet); // Delivers 10 ms of audio data. 
The data is written to |output|, which can @@ -300,13 +318,16 @@ class NetEqImpl : public webrtc::NetEq { scoped_ptr timestamp_scaler_; scoped_ptr decision_logic_; scoped_ptr vad_; - scoped_ptr > algorithm_buffer_; + scoped_ptr algorithm_buffer_; scoped_ptr sync_buffer_; scoped_ptr expand_; + scoped_ptr expand_factory_; scoped_ptr normal_; scoped_ptr merge_; scoped_ptr accelerate_; + scoped_ptr accelerate_factory_; scoped_ptr preemptive_expand_; + scoped_ptr preemptive_expand_factory_; RandomVector random_vector_; scoped_ptr comfort_noise_; Rtcp rtcp_; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc index 47fa18e20032..0fbcedbedb22 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc @@ -13,6 +13,8 @@ #include "gmock/gmock.h" #include "gtest/gtest.h" +#include "webrtc/modules/audio_coding/neteq4/accelerate.h" +#include "webrtc/modules/audio_coding/neteq4/expand.h" #include "webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h" #include "webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h" #include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h" @@ -22,6 +24,7 @@ #include "webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h" #include "webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h" #include "webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h" +#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h" #include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h" using ::testing::Return; @@ -60,6 +63,11 @@ class NetEqImplTest : public ::testing::Test { timestamp_scaler_ = new TimestampScaler(*decoder_database_); EXPECT_CALL(*decoder_database_, GetActiveCngDecoder()) .WillOnce(ReturnNull()); + AccelerateFactory* accelerate_factory = new AccelerateFactory; + ExpandFactory* expand_factory = new ExpandFactory; + PreemptiveExpandFactory* preemptive_expand_factory = + new PreemptiveExpandFactory; + neteq_ = new NetEqImpl(kInitSampleRateHz, buffer_level_filter_, decoder_database_, @@ -69,7 +77,10 @@ class NetEqImplTest : public ::testing::Test { dtmf_tone_generator_, packet_buffer_, payload_splitter_, - timestamp_scaler_); + timestamp_scaler_, + accelerate_factory, + expand_factory, + preemptive_expand_factory); } virtual ~NetEqImplTest() { @@ -159,7 +170,7 @@ TEST_F(NetEqImplTest, InsertPacket) { EXPECT_CALL(*decoder_database_, IsDtmf(kPayloadType)) .WillRepeatedly(Return(false)); // This is not DTMF. EXPECT_CALL(*decoder_database_, GetDecoder(kPayloadType)) - .Times(2) + .Times(3) .WillRepeatedly(Return(&mock_decoder)); EXPECT_CALL(*decoder_database_, IsComfortNoise(kPayloadType)) .WillRepeatedly(Return(false)); // This is not CNG. @@ -183,6 +194,9 @@ TEST_F(NetEqImplTest, InsertPacket) { // index) is a pointer, and the variable pointed to is set to kPayloadType. // Also invoke the function DeletePacketsAndReturnOk to properly delete all // packets in the list (to avoid memory leaks in the test). + EXPECT_CALL(*packet_buffer_, NextRtpHeader()) + .Times(1) + .WillOnce(Return(&rtp_header.header)); // Expectations for DTMF buffer. 
EXPECT_CALL(*dtmf_buffer_, Flush()) diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi index faf7332f28dc..419aefa1c30f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi @@ -14,6 +14,8 @@ 'dependencies': [ 'NetEq4', 'NetEq4TestTools', + 'neteq_unittest_tools', + 'PCM16B', '<(webrtc_root)/test/test.gyp:test_support_main', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', ], @@ -56,6 +58,7 @@ 'include_dirs': [ 'interface', 'test', + '<(webrtc_root)', ], 'sources': [ 'test/RTPencode.cc', @@ -145,6 +148,7 @@ 'neteq_unittest_tools', 'PCM16B', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', + '<(webrtc_root)/test/test.gyp:test_support_main', ], 'sources': [ 'test/neteq_speed_test.cc', @@ -172,6 +176,7 @@ 'include_dirs': [ 'interface', 'test', + '<(webrtc_root)', ], }, 'defines': [ @@ -179,6 +184,7 @@ 'include_dirs': [ 'interface', 'test', + '<(webrtc_root)', ], 'sources': [ 'test/NETEQTEST_DummyRTPpacket.cc', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc index 5ab2d1f31236..47e9e855e2a4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc @@ -17,17 +17,37 @@ #include #include // memset +#include +#include #include #include +#include "gflags/gflags.h" #include "gtest/gtest.h" #include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h" +#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h" #include "webrtc/test/testsupport/fileutils.h" #include "webrtc/test/testsupport/gtest_disable.h" #include "webrtc/typedefs.h" +DEFINE_bool(gen_ref, false, "Generate reference files."); + namespace webrtc { +static bool IsAllZero(const int16_t* buf, int buf_length) { + bool all_zero = true; + for (int n = 0; n < buf_length && all_zero; ++n) + all_zero = buf[n] == 0; + return all_zero; +} + +static bool IsAllNonZero(const int16_t* buf, int buf_length) { + bool all_non_zero = true; + for (int n = 0; n < buf_length && all_non_zero; ++n) + all_non_zero = buf[n] != 0; + return all_non_zero; +} + class RefFiles { public: RefFiles(const std::string& input_file, const std::string& output_file); @@ -130,7 +150,8 @@ void RefFiles::WriteToFile(const RtcpStatistics& stats) { output_fp_)); ASSERT_EQ(1u, fwrite(&(stats.cumulative_lost), sizeof(stats.cumulative_lost), 1, output_fp_)); - ASSERT_EQ(1u, fwrite(&(stats.extended_max), sizeof(stats.extended_max), 1, + ASSERT_EQ(1u, fwrite(&(stats.extended_max_sequence_number), + sizeof(stats.extended_max_sequence_number), 1, output_fp_)); ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1, output_fp_)); @@ -146,14 +167,16 @@ void RefFiles::ReadFromFileAndCompare( sizeof(ref_stats.fraction_lost), 1, input_fp_)); ASSERT_EQ(1u, fread(&(ref_stats.cumulative_lost), sizeof(ref_stats.cumulative_lost), 1, input_fp_)); - ASSERT_EQ(1u, fread(&(ref_stats.extended_max), - sizeof(ref_stats.extended_max), 1, input_fp_)); + ASSERT_EQ(1u, fread(&(ref_stats.extended_max_sequence_number), + sizeof(ref_stats.extended_max_sequence_number), 1, + input_fp_)); ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1, input_fp_)); // Compare EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost); 
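// Note: fraction_lost is in Q8 (loss fraction scaled by 256), following the
// RTCP receiver-report format of RFC 3550, so the reference and computed
// values can be compared here as plain integers.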
EXPECT_EQ(ref_stats.cumulative_lost, stats.cumulative_lost); - EXPECT_EQ(ref_stats.extended_max, stats.extended_max); + EXPECT_EQ(ref_stats.extended_max_sequence_number, + stats.extended_max_sequence_number); EXPECT_EQ(ref_stats.jitter, stats.jitter); } } @@ -190,6 +213,14 @@ class NetEqDecodingTest : public ::testing::Test { uint8_t* payload, int* payload_len); + void CheckBgnOff(int sampling_rate, NetEqBackgroundNoiseMode bgn_mode); + + void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp, + const std::set& drop_seq_numbers, + bool expect_seq_no_wrap, bool expect_timestamp_wrap); + + void LongCngWithClockDrift(double drift_factor); + NetEq* neteq_; FILE* rtp_fp_; unsigned int sim_clock_; @@ -236,10 +267,12 @@ void NetEqDecodingTest::LoadDecoders() { #endif // WEBRTC_ANDROID // Load iSAC. ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISAC, 103)); +#ifndef WEBRTC_ANDROID // Load iSAC SWB. ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISACswb, 104)); // Load iSAC FB. ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISACfb, 105)); +#endif // WEBRTC_ANDROID // Load PCM16B nb. ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16B, 93)); // Load PCM16B wb. @@ -295,7 +328,7 @@ void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file, std::string ref_out_file = ""; if (ref_file.empty()) { - ref_out_file = webrtc::test::OutputPath() + "neteq_out.pcm"; + ref_out_file = webrtc::test::OutputPath() + "neteq_universal_ref.pcm"; } RefFiles ref_files(ref_file, ref_out_file); @@ -306,7 +339,7 @@ void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file, std::ostringstream ss; ss << "Lap number " << i++ << " in DecodeAndCompare while loop"; SCOPED_TRACE(ss.str()); // Print out the parameter values on failure. - int out_len; + int out_len = 0; ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len)); ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(out_data_, out_len)); } @@ -375,6 +408,107 @@ void NetEqDecodingTest::PopulateCng(int frame_index, *payload_len = 1; // Only noise level, no spectral parameters. } +void NetEqDecodingTest::CheckBgnOff(int sampling_rate_hz, + NetEqBackgroundNoiseMode bgn_mode) { + int expected_samples_per_channel = 0; + uint8_t payload_type = 0xFF; // Invalid. + if (sampling_rate_hz == 8000) { + expected_samples_per_channel = kBlockSize8kHz; + payload_type = 93; // PCM 16, 8 kHz. + } else if (sampling_rate_hz == 16000) { + expected_samples_per_channel = kBlockSize16kHz; + payload_type = 94; // PCM 16, 16 kHZ. + } else if (sampling_rate_hz == 32000) { + expected_samples_per_channel = kBlockSize32kHz; + payload_type = 95; // PCM 16, 32 kHz. + } else { + ASSERT_TRUE(false); // Unsupported test case. + } + + NetEqOutputType type; + int16_t output[kBlockSize32kHz]; // Maximum size is chosen. + int16_t input[kBlockSize32kHz]; // Maximum size is chosen. + + // Payload of 10 ms of PCM16 32 kHz. + uint8_t payload[kBlockSize32kHz * sizeof(int16_t)]; + + // Random payload. + for (int n = 0; n < expected_samples_per_channel; ++n) { + input[n] = (rand() & ((1 << 10) - 1)) - ((1 << 5) - 1); + } + int enc_len_bytes = WebRtcPcm16b_EncodeW16( + input, expected_samples_per_channel, reinterpret_cast(payload)); + ASSERT_EQ(enc_len_bytes, expected_samples_per_channel * 2); + + WebRtcRTPHeader rtp_info; + PopulateRtpInfo(0, 0, &rtp_info); + rtp_info.header.payloadType = payload_type; + + int number_channels = 0; + int samples_per_channel = 0; + + uint32_t receive_timestamp = 0; + for (int n = 0; n < 10; ++n) { // Insert few packets and get audio. 
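+ // Each lap of this loop inserts exactly one 10 ms packet and immediately
+ // pulls one 10 ms frame, so the buffer never runs dry and every frame is
+ // expected to come out as normally decoded speech (kOutputNormal).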
+ number_channels = 0;
+ samples_per_channel = 0;
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info, payload, enc_len_bytes, receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
+ &number_channels, &type));
+ ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
+ ASSERT_EQ(kOutputNormal, type);
+
+ // Next packet.
+ rtp_info.header.timestamp += expected_samples_per_channel;
+ rtp_info.header.sequenceNumber++;
+ receive_timestamp += expected_samples_per_channel;
+ }
+
+ number_channels = 0;
+ samples_per_channel = 0;
+
+ // Get audio without inserting packets, expecting PLC and PLC-to-CNG. Pull
+ // one frame without checking speech-type. This is the first frame pulled
+ // without inserting any packet, and might not be labeled as PLC.
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
+ &number_channels, &type));
+ ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
+
+ // To be able to test the fading of background noise we need to pull at
+ // least 610 frames.
+ const int kFadingThreshold = 610;
+
+ // Test several CNG-to-PLC packets for the expected behavior. The number 20
+ // is arbitrary, but large enough to cover a sufficient number of frames.
+ const int kNumPlcToCngTestFrames = 20;
+ bool plc_to_cng = false;
+ for (int n = 0; n < kFadingThreshold + kNumPlcToCngTestFrames; ++n) {
+ number_channels = 0;
+ samples_per_channel = 0;
+ memset(output, 1, sizeof(output)); // Set to non-zero.
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
+ &number_channels, &type));
+ ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
+ if (type == kOutputPLCtoCNG) {
+ plc_to_cng = true;
+ double sum_squared = 0;
+ for (int k = 0; k < number_channels * samples_per_channel; ++k)
+ sum_squared += output[k] * output[k];
+ if (bgn_mode == kBgnOn) {
+ EXPECT_NE(0, sum_squared);
+ } else if (bgn_mode == kBgnOff || n > kFadingThreshold) {
+ EXPECT_EQ(0, sum_squared);
+ }
+ } else {
+ EXPECT_EQ(kOutputPLC, type);
+ }
+ }
+ EXPECT_TRUE(plc_to_cng); // Just to be sure that PLC-to-CNG has occurred.
+}
+
#if defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)
// Disabled for Windows 64-bit until webrtc:1458 is fixed.
#define MAYBE_TestBitExactness DISABLED_TestBitExactness
@@ -383,35 +517,45 @@ void NetEqDecodingTest::PopulateCng(int frame_index,
#endif
TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(MAYBE_TestBitExactness)) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+ const std::string input_rtp_file = webrtc::test::ProjectRootPath() +
"resources/audio_coding/neteq_universal_new.rtp";
#if defined(_MSC_VER) && (_MSC_VER >= 1700)
// For Visual Studio 2012 and later, we will have to use the generic reference
// file, rather than the windows-specific one.
- const std::string kInputRefFile = webrtc::test::ProjectRootPath() + - "resources/audio_coding/neteq_universal_ref.pcm"; + const std::string input_ref_file = webrtc::test::ProjectRootPath() + + "resources/audio_coding/neteq4_universal_ref.pcm"; #else - const std::string kInputRefFile = - webrtc::test::ResourcePath("audio_coding/neteq_universal_ref", "pcm"); + const std::string input_ref_file = + webrtc::test::ResourcePath("audio_coding/neteq4_universal_ref", "pcm"); #endif - DecodeAndCompare(kInputRtpFile, kInputRefFile); + + if (FLAGS_gen_ref) { + DecodeAndCompare(input_rtp_file, ""); + } else { + DecodeAndCompare(input_rtp_file, input_ref_file); + } } TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestNetworkStatistics)) { - const std::string kInputRtpFile = webrtc::test::ProjectRootPath() + + const std::string input_rtp_file = webrtc::test::ProjectRootPath() + "resources/audio_coding/neteq_universal_new.rtp"; #if defined(_MSC_VER) && (_MSC_VER >= 1700) // For Visual Studio 2012 and later, we will have to use the generic reference // file, rather than the windows-specific one. - const std::string kNetworkStatRefFile = webrtc::test::ProjectRootPath() + - "resources/audio_coding/neteq_network_stats.dat"; + const std::string network_stat_ref_file = webrtc::test::ProjectRootPath() + + "resources/audio_coding/neteq4_network_stats.dat"; #else - const std::string kNetworkStatRefFile = - webrtc::test::ResourcePath("audio_coding/neteq_network_stats", "dat"); + const std::string network_stat_ref_file = + webrtc::test::ResourcePath("audio_coding/neteq4_network_stats", "dat"); #endif - const std::string kRtcpStatRefFile = - webrtc::test::ResourcePath("audio_coding/neteq_rtcp_stats", "dat"); - DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile); + const std::string rtcp_stat_ref_file = + webrtc::test::ResourcePath("audio_coding/neteq4_rtcp_stats", "dat"); + if (FLAGS_gen_ref) { + DecodeAndCheckStats(input_rtp_file, "", ""); + } else { + DecodeAndCheckStats(input_rtp_file, network_stat_ref_file, + rtcp_stat_ref_file); + } } // TODO(hlundin): Re-enable test once the statistics interface is up and again. @@ -449,7 +593,6 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestFrameWaitingTimeStatistics)) { std::vector waiting_times; neteq_->WaitingTimes(&waiting_times); - int len = waiting_times.size(); EXPECT_EQ(num_frames, waiting_times.size()); // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms // spacing (per definition), we expect the delay to increase with 10 ms for @@ -460,7 +603,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestFrameWaitingTimeStatistics)) { // Check statistics again and make sure it's been reset. neteq_->WaitingTimes(&waiting_times); - len = waiting_times.size(); + int len = waiting_times.size(); EXPECT_EQ(0, len); // Process > 100 frames, and make sure that that we get statistics @@ -554,14 +697,12 @@ TEST_F(NetEqDecodingTest, EXPECT_EQ(110946, network_stats.clockdrift_ppm); } -TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) { +void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor) { uint16_t seq_no = 0; uint32_t timestamp = 0; const int kFrameSizeMs = 30; const int kSamples = kFrameSizeMs * 16; const int kPayloadBytes = kSamples * 2; - // Apply a clock drift of -25 ms / s (sender faster than receiver). 
- const double kDriftFactor = 1000.0 / (1000.0 + 25.0); double next_input_time_ms = 0.0; double t_ms; NetEqOutputType type; @@ -578,7 +719,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) { ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0)); ++seq_no; timestamp += kSamples; - next_input_time_ms += static_cast(kFrameSizeMs) * kDriftFactor; + next_input_time_ms += static_cast(kFrameSizeMs) * drift_factor; } // Pull out data once. int out_len; @@ -606,7 +747,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) { ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0)); ++seq_no; timestamp += kCngPeriodSamples; - next_input_time_ms += static_cast(kCngPeriodMs) * kDriftFactor; + next_input_time_ms += static_cast(kCngPeriodMs) * drift_factor; } // Pull out data once. int out_len; @@ -629,7 +770,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) { ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0)); ++seq_no; timestamp += kSamples; - next_input_time_ms += static_cast(kFrameSizeMs) * kDriftFactor; + next_input_time_ms += static_cast(kFrameSizeMs) * drift_factor; } // Pull out data once. int out_len; @@ -647,6 +788,20 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) { EXPECT_GE(delay_after, delay_before - 20 * 16); } +TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockNegativeDrift)) { + // Apply a clock drift of -25 ms / s (sender faster than receiver). + const double kDriftFactor = 1000.0 / (1000.0 + 25.0); + LongCngWithClockDrift(kDriftFactor); +} + +// TODO(hlundin): Re-enable this test and fix the issues to make it pass. +TEST_F(NetEqDecodingTest, + DISABLED_ON_ANDROID(DISABLED_LongCngWithClockPositiveDrift)) { + // Apply a clock drift of +25 ms / s (sender slower than receiver). + const double kDriftFactor = 1000.0 / (1000.0 - 25.0); + LongCngWithClockDrift(kDriftFactor); +} + TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(UnknownPayloadType)) { const int kPayloadBytes = 100; uint8_t payload[kPayloadBytes] = {0}; @@ -731,4 +886,348 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(GetAudioBeforeInsertPacket)) { EXPECT_EQ(0, out_data_[i]); } } -} // namespace + +TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(BackgroundNoise)) { + neteq_->SetBackgroundNoiseMode(kBgnOn); + CheckBgnOff(8000, kBgnOn); + CheckBgnOff(16000, kBgnOn); + CheckBgnOff(32000, kBgnOn); + EXPECT_EQ(kBgnOn, neteq_->BackgroundNoiseMode()); + + neteq_->SetBackgroundNoiseMode(kBgnOff); + CheckBgnOff(8000, kBgnOff); + CheckBgnOff(16000, kBgnOff); + CheckBgnOff(32000, kBgnOff); + EXPECT_EQ(kBgnOff, neteq_->BackgroundNoiseMode()); + + neteq_->SetBackgroundNoiseMode(kBgnFade); + CheckBgnOff(8000, kBgnFade); + CheckBgnOff(16000, kBgnFade); + CheckBgnOff(32000, kBgnFade); + EXPECT_EQ(kBgnFade, neteq_->BackgroundNoiseMode()); +} + +TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketInsert)) { + WebRtcRTPHeader rtp_info; + uint32_t receive_timestamp = 0; + // For the readability use the following payloads instead of the defaults of + // this test. + uint8_t kPcm16WbPayloadType = 1; + uint8_t kCngNbPayloadType = 2; + uint8_t kCngWbPayloadType = 3; + uint8_t kCngSwb32PayloadType = 4; + uint8_t kCngSwb48PayloadType = 5; + uint8_t kAvtPayloadType = 6; + uint8_t kRedPayloadType = 7; + uint8_t kIsacPayloadType = 9; // Payload type 8 is already registered. + + // Register decoders. 
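+ // The payload-type numbers 1-9 below are arbitrary test values; each one
+ // is bound to a decoder so that every sync-packet restriction (no CNG, no
+ // AVT/DTMF, no RED, no codec change) can be exercised per payload type.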
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bwb, + kPcm16WbPayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGnb, kCngNbPayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGwb, kCngWbPayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGswb32kHz, + kCngSwb32PayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGswb48kHz, + kCngSwb48PayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderAVT, kAvtPayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderRED, kRedPayloadType)); + ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISAC, kIsacPayloadType)); + + PopulateRtpInfo(0, 0, &rtp_info); + rtp_info.header.payloadType = kPcm16WbPayloadType; + + // The first packet injected cannot be sync-packet. + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + // Payload length of 10 ms PCM16 16 kHz. + const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t); + uint8_t payload[kPayloadBytes] = {0}; + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_info, payload, kPayloadBytes, receive_timestamp)); + + // Next packet. Last packet contained 10 ms audio. + rtp_info.header.sequenceNumber++; + rtp_info.header.timestamp += kBlockSize16kHz; + receive_timestamp += kBlockSize16kHz; + + // Unacceptable payload types CNG, AVT (DTMF), RED. + rtp_info.header.payloadType = kCngNbPayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + rtp_info.header.payloadType = kCngWbPayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + rtp_info.header.payloadType = kCngSwb32PayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + rtp_info.header.payloadType = kCngSwb48PayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + rtp_info.header.payloadType = kAvtPayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + rtp_info.header.payloadType = kRedPayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + // Change of codec cannot be initiated with a sync packet. + rtp_info.header.payloadType = kIsacPayloadType; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + // Change of SSRC is not allowed with a sync packet. + rtp_info.header.payloadType = kPcm16WbPayloadType; + ++rtp_info.header.ssrc; + EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + + --rtp_info.header.ssrc; + EXPECT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); +} + +// First insert several noise like packets, then sync-packets. Decoding all +// packets should not produce error, statistics should not show any packet loss +// and sync-packets should decode to zero. +TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketDecode)) { + WebRtcRTPHeader rtp_info; + PopulateRtpInfo(0, 0, &rtp_info); + const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t); + uint8_t payload[kPayloadBytes]; + int16_t decoded[kBlockSize16kHz]; + for (int n = 0; n < kPayloadBytes; ++n) { + payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence. + } + // Insert some packets which decode to noise. We are not interested in + // actual decoded values. 
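+ // (Each payload byte above is at least 1, so every decoded PCM16 sample
+ // should be non-zero; this is what lets the test tell real decoded audio
+ // apart from the all-zero output expected from sync-packets.)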
+ NetEqOutputType output_type;
+ int num_channels;
+ int samples_per_channel;
+ uint32_t receive_timestamp = 0;
+ int delay_samples = 0;
+ for (int n = 0; n < 100; ++n) {
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+
+ // Even if there is an RTP packet in NetEq's buffer, the first frame
+ // pulled from NetEq starts with a few zero samples. Here we measure this
+ // delay.
+ if (n == 0) {
+ while (decoded[delay_samples] == 0) delay_samples++;
+ }
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+ const int kNumSyncPackets = 10;
+ // Insert sync-packets; the decoded sequence should be all-zero.
+ for (int n = 0; n < kNumSyncPackets; ++n) {
+ ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+ EXPECT_TRUE(IsAllZero(&decoded[delay_samples],
+ samples_per_channel * num_channels - delay_samples));
+ delay_samples = 0; // Delay only matters in the first frame.
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+ // We insert a regular packet; if sync-packets were not correctly buffered,
+ // the network statistics would show some packet loss.
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ // Make sure the last inserted packet is decoded and there are non-zero
+ // samples.
+ EXPECT_FALSE(IsAllZero(decoded, samples_per_channel * num_channels));
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+ // Expecting a "clean" network.
+ EXPECT_EQ(0, network_stats.packet_loss_rate);
+ EXPECT_EQ(0, network_stats.expand_rate);
+ EXPECT_EQ(0, network_stats.accelerate_rate);
+ EXPECT_EQ(0, network_stats.preemptive_rate);
+}
+
+// Test that the size of the packet buffer is reported correctly when it
+// contains sync packets. Also test that network packets override sync
+// packets, i.e., decoding a network packet is preferred over a sync packet
+// when both have the same sequence number and timestamp.
+TEST_F(NetEqDecodingTest,
+ DISABLED_ON_ANDROID(SyncPacketBufferSizeAndOverridenByNetworkPackets)) {
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
+ uint8_t payload[kPayloadBytes];
+ int16_t decoded[kBlockSize16kHz];
+ for (int n = 0; n < kPayloadBytes; ++n) {
+ payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
+ }
+ // Insert some packets which decode to noise. We are not interested in
+ // actual decoded values.
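+ // Test outline: decode one regular packet first, then insert
+ // kNumSyncPackets sync-packets without decoding and check that the buffer
+ // reports 10 ms per sync-packet; finally re-insert regular packets with
+ // the same RTP headers and verify that decoding yields all non-zero audio,
+ // i.e., the network packets took precedence over the buffered sync-packets.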
+ NetEqOutputType output_type; + int num_channels; + int samples_per_channel; + uint32_t receive_timestamp = 0; + for (int n = 0; n < 1; ++n) { + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, + receive_timestamp)); + ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded, + &samples_per_channel, &num_channels, + &output_type)); + ASSERT_EQ(kBlockSize16kHz, samples_per_channel); + ASSERT_EQ(1, num_channels); + rtp_info.header.sequenceNumber++; + rtp_info.header.timestamp += kBlockSize16kHz; + receive_timestamp += kBlockSize16kHz; + } + const int kNumSyncPackets = 10; + + WebRtcRTPHeader first_sync_packet_rtp_info; + memcpy(&first_sync_packet_rtp_info, &rtp_info, sizeof(rtp_info)); + + // Insert sync-packets, but no decoding. + for (int n = 0; n < kNumSyncPackets; ++n) { + ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp)); + rtp_info.header.sequenceNumber++; + rtp_info.header.timestamp += kBlockSize16kHz; + receive_timestamp += kBlockSize16kHz; + } + NetEqNetworkStatistics network_stats; + ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats)); + EXPECT_EQ(kNumSyncPackets * 10, network_stats.current_buffer_size_ms); + + // Rewind |rtp_info| to that of the first sync packet. + memcpy(&rtp_info, &first_sync_packet_rtp_info, sizeof(rtp_info)); + + // Insert. + for (int n = 0; n < kNumSyncPackets; ++n) { + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, + receive_timestamp)); + rtp_info.header.sequenceNumber++; + rtp_info.header.timestamp += kBlockSize16kHz; + receive_timestamp += kBlockSize16kHz; + } + + // Decode. + for (int n = 0; n < kNumSyncPackets; ++n) { + ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded, + &samples_per_channel, &num_channels, + &output_type)); + ASSERT_EQ(kBlockSize16kHz, samples_per_channel); + ASSERT_EQ(1, num_channels); + EXPECT_TRUE(IsAllNonZero(decoded, samples_per_channel * num_channels)); + } +} + +void NetEqDecodingTest::WrapTest(uint16_t start_seq_no, + uint32_t start_timestamp, + const std::set& drop_seq_numbers, + bool expect_seq_no_wrap, + bool expect_timestamp_wrap) { + uint16_t seq_no = start_seq_no; + uint32_t timestamp = start_timestamp; + const int kBlocksPerFrame = 3; // Number of 10 ms blocks per frame. + const int kFrameSizeMs = kBlocksPerFrame * kTimeStepMs; + const int kSamples = kBlockSize16kHz * kBlocksPerFrame; + const int kPayloadBytes = kSamples * sizeof(int16_t); + double next_input_time_ms = 0.0; + int16_t decoded[kBlockSize16kHz]; + int num_channels; + int samples_per_channel; + NetEqOutputType output_type; + uint32_t receive_timestamp = 0; + + // Insert speech for 1 second. + const int kSpeechDurationMs = 2000; + int packets_inserted = 0; + uint16_t last_seq_no; + uint32_t last_timestamp; + bool timestamp_wrapped = false; + bool seq_no_wrapped = false; + for (double t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) { + // Each turn in this for loop is 10 ms. + while (next_input_time_ms <= t_ms) { + // Insert one 30 ms speech frame. + uint8_t payload[kPayloadBytes] = {0}; + WebRtcRTPHeader rtp_info; + PopulateRtpInfo(seq_no, timestamp, &rtp_info); + if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) { + // This sequence number was not in the set to drop. Insert it. 
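+ // Sequence numbers listed in |drop_seq_numbers| are skipped on purpose,
+ // simulating packet loss (in the tests below, right around the
+ // wrap-around point).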
+ ASSERT_EQ(0, + neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, + receive_timestamp)); + ++packets_inserted; + } + NetEqNetworkStatistics network_stats; + ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats)); + + // Due to internal NetEq logic, preferred buffer-size is about 4 times the + // packet size for first few packets. Therefore we refrain from checking + // the criteria. + if (packets_inserted > 4) { + // Expect preferred and actual buffer size to be no more than 2 frames. + EXPECT_LE(network_stats.preferred_buffer_size_ms, kFrameSizeMs * 2); + EXPECT_LE(network_stats.current_buffer_size_ms, kFrameSizeMs * 2); + } + last_seq_no = seq_no; + last_timestamp = timestamp; + + ++seq_no; + timestamp += kSamples; + receive_timestamp += kSamples; + next_input_time_ms += static_cast(kFrameSizeMs); + + seq_no_wrapped |= seq_no < last_seq_no; + timestamp_wrapped |= timestamp < last_timestamp; + } + // Pull out data once. + ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded, + &samples_per_channel, &num_channels, + &output_type)); + ASSERT_EQ(kBlockSize16kHz, samples_per_channel); + ASSERT_EQ(1, num_channels); + + // Expect delay (in samples) to be less than 2 packets. + EXPECT_LE(timestamp - neteq_->PlayoutTimestamp(), + static_cast(kSamples * 2)); + } + // Make sure we have actually tested wrap-around. + ASSERT_EQ(expect_seq_no_wrap, seq_no_wrapped); + ASSERT_EQ(expect_timestamp_wrap, timestamp_wrapped); +} + +TEST_F(NetEqDecodingTest, SequenceNumberWrap) { + // Start with a sequence number that will soon wrap. + std::set drop_seq_numbers; // Don't drop any packets. + WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false); +} + +TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) { + // Start with a sequence number that will soon wrap. + std::set drop_seq_numbers; + drop_seq_numbers.insert(0xFFFF); + drop_seq_numbers.insert(0x0); + WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false); +} + +TEST_F(NetEqDecodingTest, TimestampWrap) { + // Start with a timestamp that will soon wrap. + std::set drop_seq_numbers; + WrapTest(0, 0xFFFFFFFF - 3000, drop_seq_numbers, false, true); +} + +TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) { + // Start with a timestamp and a sequence number that will wrap at the same + // time. + std::set drop_seq_numbers; + WrapTest(0xFFFF - 10, 0xFFFFFFFF - 5000, drop_seq_numbers, true, true); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.cc index 426df687c92f..8d9c020f96d8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.cc @@ -28,7 +28,7 @@ int Normal::Process(const int16_t* input, size_t length, Modes last_mode, int16_t* external_mute_factor_array, - AudioMultiVector* output) { + AudioMultiVector* output) { if (length == 0) { // Nothing to process. output->Clear(); @@ -55,7 +55,7 @@ int Normal::Process(const int16_t* input, expand_->SetParametersForNormalAfterExpand(); // Call Expand. 
- AudioMultiVector expanded(output->Channels()); + AudioMultiVector expanded(output->Channels()); expand_->Process(&expanded); expand_->Reset(); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.h index 23fcca515da8..df283198ff51 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/normal.h @@ -53,7 +53,7 @@ class Normal { int Process(const int16_t* input, size_t length, Modes last_mode, int16_t* external_mute_factor_array, - AudioMultiVector* output); + AudioMultiVector* output); private: int fs_hz_; @@ -65,4 +65,4 @@ class Normal { }; } // namespace webrtc -#endif // SRC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_ +#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet.h index 90994a9b7550..4518f91381a5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet.h @@ -25,32 +25,49 @@ struct Packet { int payload_length; bool primary; // Primary, i.e., not redundant payload. int waiting_time; + bool sync_packet; // Constructor. Packet() : payload(NULL), payload_length(0), primary(true), - waiting_time(0) { + waiting_time(0), + sync_packet(false) { } // Comparison operators. Establish a packet ordering based on (1) timestamp, - // (2) sequence number, and (3) redundancy. Timestamp and sequence numbers - // are compared taking wrap-around into account. If both timestamp and - // sequence numbers are identical, a primary payload is considered "smaller" - // than a secondary. + // (2) sequence number, (3) regular packet vs sync-packet and (4) redundancy. + // Timestamp and sequence numbers are compared taking wrap-around into + // account. If both timestamp and sequence numbers are identical and one of + // the packets is sync-packet, the regular packet is considered earlier. For + // two regular packets with the same sequence number and timestamp a primary + // payload is considered "smaller" than a secondary. bool operator==(const Packet& rhs) const { return (this->header.timestamp == rhs.header.timestamp && this->header.sequenceNumber == rhs.header.sequenceNumber && - this->primary == rhs.primary); + this->primary == rhs.primary && + this->sync_packet == rhs.sync_packet); } bool operator!=(const Packet& rhs) const { return !operator==(rhs); } bool operator<(const Packet& rhs) const { if (this->header.timestamp == rhs.header.timestamp) { if (this->header.sequenceNumber == rhs.header.sequenceNumber) { - // Timestamp and sequence numbers are identical. Deem left hand side - // to be "smaller" (i.e., "earlier") if it is primary, and right hand - // side is not. + // Timestamp and sequence numbers are identical. A sync packet should + // be recognized "larger" (i.e. "later") compared to a "network packet" + // (regular packet from network not sync-packet). If none of the packets + // are sync-packets, then deem the left hand side to be "smaller" + // (i.e., "earlier") if it is primary, and right hand side is not. + // + // The condition on sync packets to be larger than "network packets," + // given same RTP sequence number and timestamp, guarantees that a + // "network packet" to be inserted in an earlier position into + // |packet_buffer_| compared to a sync packet of same timestamp and + // sequence number. 
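+ // In short, for identical timestamps and sequence numbers the intended
+ // order is: network primary < network secondary < sync-packet, so that a
+ // later-arriving network packet sorts ahead of, and can replace, a
+ // sync-packet carrying the same RTP header.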
+ if (rhs.sync_packet) + return true; + if (this->sync_packet) + return false; return (this->primary && !rhs.primary); } return (static_cast(rhs.header.sequenceNumber diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet_buffer.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet_buffer.cc index 2b5f28e4a67e..d19abbaa84da 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet_buffer.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/packet_buffer.cc @@ -234,19 +234,19 @@ int PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database, int last_decoded_length) const { PacketList::const_iterator it; int num_samples = 0; + int last_duration = last_decoded_length; for (it = buffer_.begin(); it != buffer_.end(); ++it) { Packet* packet = (*it); AudioDecoder* decoder = decoder_database->GetDecoder(packet->header.payloadType); if (decoder) { - int duration = decoder->PacketDuration(packet->payload, - packet->payload_length); + int duration = packet->sync_packet ? last_duration : + decoder->PacketDuration(packet->payload, packet->payload_length); if (duration >= 0) { - num_samples += duration; - continue; // Go to next packet in loop. + last_duration = duration; // Save the most up-to-date (valid) duration. } } - num_samples += last_decoded_length; + num_samples += last_duration; } return num_samples; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/payload_splitter.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/payload_splitter.cc index 62ed5dae789e..56039a57ec6d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/payload_splitter.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/payload_splitter.cc @@ -163,6 +163,11 @@ int PayloadSplitter::SplitAudio(PacketList* packet_list, if (!info) { return kUnknownPayloadType; } + // No splitting for a sync-packet. + if (packet->sync_packet) { + ++it; + continue; + } PacketList new_packets; switch (info->codec_type) { case kDecoderPCMu: @@ -326,7 +331,6 @@ void PayloadSplitter::SplitBySamples(const Packet* packet, new_packet->primary = packet->primary; new_packet->payload = new uint8_t[len]; memcpy(new_packet->payload, payload_ptr, len); - payload_ptr += len; new_packets->push_back(new_packet); } } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc index 195315fa98d6..c7ce31040a11 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc @@ -20,7 +20,7 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::Process( const int16_t* input, int input_length, int old_data_length, - AudioMultiVector* output, + AudioMultiVector* output, int16_t* length_change_samples) { old_data_length_per_channel_ = old_data_length; // Input length must be (almost) 30 ms. @@ -56,7 +56,7 @@ void PreemptiveExpand::SetParametersForPassiveSpeech(size_t len, PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch( const int16_t *input, size_t input_length, size_t peak_index, int16_t best_correlation, bool active_speech, - AudioMultiVector* output) const { + AudioMultiVector* output) const { // Pre-calculate common multiplication with |fs_mult_|. // 120 corresponds to 15 ms. 
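// (|fs_mult_| is the sampling rate divided by 8000, so 120 * fs_mult_
// samples amount to 15 ms at any of the supported rates.)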
int fs_mult_120 = fs_mult_ * 120; @@ -75,7 +75,7 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch( output->PushBackInterleaved( input, (unmodified_length + peak_index) * num_channels_); // Copy the last |peak_index| samples up to 15 ms to |temp_vector|. - AudioMultiVector temp_vector(num_channels_); + AudioMultiVector temp_vector(num_channels_); temp_vector.PushBackInterleaved( &input[(unmodified_length - peak_index) * num_channels_], peak_index * num_channels_); @@ -98,4 +98,11 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch( } } +PreemptiveExpand* PreemptiveExpandFactory::Create( + int sample_rate_hz, + size_t num_channels, + const BackgroundNoise& background_noise) const { + return new PreemptiveExpand(sample_rate_hz, num_channels, background_noise); +} + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.h index 1bb610ffde79..241425e81873 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/preemptive_expand.h @@ -46,7 +46,7 @@ class PreemptiveExpand : public TimeStretch { ReturnCodes Process(const int16_t *pw16_decoded, int len, int old_data_len, - AudioMultiVector* output, + AudioMultiVector* output, int16_t* length_change_samples); protected: @@ -61,7 +61,7 @@ class PreemptiveExpand : public TimeStretch { virtual ReturnCodes CheckCriteriaAndStretch( const int16_t *pw16_decoded, size_t len, size_t w16_bestIndex, int16_t w16_bestCorr, bool w16_VAD, - AudioMultiVector* output) const; + AudioMultiVector* output) const; private: int old_data_length_per_channel_; @@ -70,5 +70,15 @@ class PreemptiveExpand : public TimeStretch { DISALLOW_COPY_AND_ASSIGN(PreemptiveExpand); }; +struct PreemptiveExpandFactory { + PreemptiveExpandFactory() {} + virtual ~PreemptiveExpandFactory() {} + + virtual PreemptiveExpand* Create( + int sample_rate_hz, + size_t num_channels, + const BackgroundNoise& background_noise) const; +}; + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/random_vector.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/random_vector.cc index 823909f13528..e7a5a1d1bcc5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/random_vector.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/random_vector.cc @@ -54,4 +54,4 @@ void RandomVector::IncreaseSeedIncrement(int16_t increase_by) { seed_increment_+= increase_by; seed_increment_ &= kRandomTableSize - 1; } -} +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/rtcp.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/rtcp.cc index 3572471460c4..bc178fc3aa62 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/rtcp.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/rtcp.cc @@ -12,6 +12,8 @@ #include +#include + #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" #include "webrtc/modules/interface/module_common_types.h" @@ -54,12 +56,14 @@ void Rtcp::Update(const RTPHeader& rtp_header, uint32_t receive_timestamp) { void Rtcp::GetStatistics(bool no_reset, RtcpStatistics* stats) { // Extended highest sequence number received. 
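// Per RFC 3550, the extended highest sequence number is the 16-bit highest
// sequence number received, plus 65536 times the number of sequence-number
// wrap-arounds (|cycles_|) detected so far.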
- stats->extended_max = (static_cast(cycles_) << 16) + max_seq_no_; + stats->extended_max_sequence_number = + (static_cast(cycles_) << 16) + max_seq_no_; // Calculate expected number of packets and compare it with the number of // packets that were actually received. The cumulative number of lost packets // can be extracted. - uint32_t expected_packets = stats->extended_max - base_seq_no_ + 1; + uint32_t expected_packets = + stats->extended_max_sequence_number - base_seq_no_ + 1; if (received_packets_ == 0) { // No packets received, assume none lost. stats->cumulative_lost = 0; @@ -83,10 +87,7 @@ void Rtcp::GetStatistics(bool no_reset, RtcpStatistics* stats) { if (expected_since_last == 0 || lost <= 0 || received_packets_ == 0) { stats->fraction_lost = 0; } else { - stats->fraction_lost = (lost << 8) / expected_since_last; - } - if (stats->fraction_lost > 0xFF) { - stats->fraction_lost = 0xFF; + stats->fraction_lost = std::min(0xFFU, (lost << 8) / expected_since_last); } stats->jitter = jitter_ >> 4; // Scaling from Q4. diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.cc index 72fa16af1763..75ee6ece0825 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.cc @@ -20,10 +20,10 @@ size_t SyncBuffer::FutureLength() const { return Size() - next_index_; } -void SyncBuffer::PushBack(const AudioMultiVector& append_this) { +void SyncBuffer::PushBack(const AudioMultiVector& append_this) { size_t samples_added = append_this.Size(); - AudioMultiVector::PushBack(append_this); - AudioMultiVector::PopFront(samples_added); + AudioMultiVector::PushBack(append_this); + AudioMultiVector::PopFront(samples_added); if (samples_added <= next_index_) { next_index_ -= samples_added; } else { @@ -44,7 +44,7 @@ void SyncBuffer::PushFrontZeros(size_t length) { void SyncBuffer::InsertZerosAtIndex(size_t length, size_t position) { position = std::min(position, Size()); length = std::min(length, Size() - position); - AudioMultiVector::PopBack(length); + AudioMultiVector::PopBack(length); for (size_t channel = 0; channel < Channels(); ++channel) { channels_[channel]->InsertZerosAt(length, position); } @@ -58,15 +58,15 @@ void SyncBuffer::InsertZerosAtIndex(size_t length, size_t position) { } } -void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this, +void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this, size_t length, size_t position) { position = std::min(position, Size()); // Cap |position| in the valid range. 
length = std::min(length, Size() - position); - AudioMultiVector::OverwriteAt(insert_this, length, position); + AudioMultiVector::OverwriteAt(insert_this, length, position); } -void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this, +void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this, size_t position) { ReplaceAtIndex(insert_this, insert_this.Size(), position); } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.h index 7add358d5e1c..e1e5daf1b780 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer.h @@ -17,10 +17,10 @@ namespace webrtc { -class SyncBuffer : public AudioMultiVector { +class SyncBuffer : public AudioMultiVector { public: SyncBuffer(size_t channels, size_t length) - : AudioMultiVector(channels, length), + : AudioMultiVector(channels, length), next_index_(length), end_timestamp_(0), dtmf_index_(0) {} @@ -34,7 +34,7 @@ class SyncBuffer : public AudioMultiVector { // the same number of samples from the beginning of the SyncBuffer, to // maintain a constant buffer size. The |next_index_| is updated to reflect // the move of the beginning of "future" data. - void PushBack(const AudioMultiVector& append_this); + void PushBack(const AudioMultiVector& append_this); // Adds |length| zeros to the beginning of each channel. Removes // the same number of samples from the end of the SyncBuffer, to @@ -56,13 +56,13 @@ class SyncBuffer : public AudioMultiVector { // and |position| are selected such that the new data would extend beyond the // end of the current SyncBuffer, the buffer is not extended. // The |next_index_| is not updated. - virtual void ReplaceAtIndex(const AudioMultiVector& insert_this, + virtual void ReplaceAtIndex(const AudioMultiVector& insert_this, size_t length, size_t position); // Same as the above method, but where all of |insert_this| is written (with // the same constraints as above, that the SyncBuffer is not extended). - virtual void ReplaceAtIndex(const AudioMultiVector& insert_this, + virtual void ReplaceAtIndex(const AudioMultiVector& insert_this, size_t position); // Reads |requested_len| samples from each channel and writes them interleaved @@ -78,7 +78,7 @@ class SyncBuffer : public AudioMultiVector { // created. void Flush(); - const AudioVector& Channel(size_t n) { return *channels_[n]; } + const AudioVector& Channel(size_t n) { return *channels_[n]; } // Accessors and mutators. size_t next_index() const { return next_index_; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc index 054e69a76580..1aafa22ab888 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc @@ -53,7 +53,7 @@ TEST(SyncBuffer, PushBackAndFlush) { static const size_t kChannels = 2; SyncBuffer sync_buffer(kChannels, kLen); static const size_t kNewLen = 10; - AudioMultiVector new_data(kChannels, kNewLen); + AudioMultiVector new_data(kChannels, kNewLen); // Populate |new_data|. 
for (size_t channel = 0; channel < kChannels; ++channel) { for (size_t i = 0; i < kNewLen; ++i) { @@ -93,7 +93,7 @@ TEST(SyncBuffer, PushFrontZeros) { static const size_t kChannels = 2; SyncBuffer sync_buffer(kChannels, kLen); static const size_t kNewLen = 10; - AudioMultiVector new_data(kChannels, kNewLen); + AudioMultiVector new_data(kChannels, kNewLen); // Populate |new_data|. for (size_t channel = 0; channel < kChannels; ++channel) { for (size_t i = 0; i < kNewLen; ++i) { @@ -126,7 +126,7 @@ TEST(SyncBuffer, GetNextAudioInterleaved) { static const size_t kChannels = 2; SyncBuffer sync_buffer(kChannels, kLen); static const size_t kNewLen = 10; - AudioMultiVector new_data(kChannels, kNewLen); + AudioMultiVector new_data(kChannels, kNewLen); // Populate |new_data|. for (size_t channel = 0; channel < kChannels; ++channel) { for (size_t i = 0; i < kNewLen; ++i) { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc index 5c47f2cf299a..e1750912cdcb 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc @@ -29,108 +29,113 @@ int NETEQTEST_DummyRTPpacket::readFromFile(FILE *fp) uint16_t length, plen; uint32_t offset; + int packetLen; - if (fread(&length, 2, 1, fp) == 0) - { - reset(); - return -2; - } - length = ntohs(length); + bool readNextPacket = true; + while (readNextPacket) { + readNextPacket = false; + if (fread(&length, 2, 1, fp) == 0) + { + reset(); + return -2; + } + length = ntohs(length); - if (fread(&plen, 2, 1, fp) == 0) - { - reset(); - return -1; - } - int packetLen = ntohs(plen); - - if (fread(&offset, 4, 1, fp) == 0) - { - reset(); - return -1; - } - // Store in local variable until we have passed the reset below. - uint32_t receiveTime = ntohl(offset); - - // Use length here because a plen of 0 specifies rtcp. - length = (uint16_t) (length - _kRDHeaderLen); - - // check buffer size - if (_datagram && _memSize < length) - { - reset(); - } - - if (!_datagram) - { - _datagram = new uint8_t[length]; - _memSize = length; - } - memset(_datagram, 0, length); - - if (length == 0) - { - _datagramLen = 0; - _rtpParsed = false; - return packetLen; - } - - // Read basic header - if (fread((unsigned short *) _datagram, 1, _kBasicHeaderLen, fp) - != (size_t)_kBasicHeaderLen) - { - reset(); - return -1; - } - _receiveTime = receiveTime; - _datagramLen = _kBasicHeaderLen; - - // Parse the basic header - webrtc::WebRtcRTPHeader tempRTPinfo; - int P, X, CC; - parseBasicHeader(&tempRTPinfo, &P, &X, &CC); - - // Check if we have to extend the header - if (X != 0 || CC != 0) - { - int newLen = _kBasicHeaderLen + CC * 4 + X * 4; - assert(_memSize >= newLen); - - // Read extension from file - size_t readLen = newLen - _kBasicHeaderLen; - if (fread((unsigned short *) _datagram + _kBasicHeaderLen, 1, readLen, - fp) != readLen) + if (fread(&plen, 2, 1, fp) == 0) { reset(); return -1; } - _datagramLen = newLen; + packetLen = ntohs(plen); - if (X != 0) + if (fread(&offset, 4, 1, fp) == 0) { - int totHdrLen = calcHeaderLength(X, CC); - assert(_memSize >= totHdrLen); + reset(); + return -1; + } + // Store in local variable until we have passed the reset below. + uint32_t receiveTime = ntohl(offset); + + // Use length here because a plen of 0 specifies rtcp. 
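+ // An rtpdump record is framed as a 16-bit record length (including this
+ // 8-byte record header), a 16-bit RTP packet length (0 for RTCP), and a
+ // 32-bit arrival-time offset in milliseconds, which is what the three
+ // reads above pick apart.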
+ length = (uint16_t) (length - _kRDHeaderLen); + + // check buffer size + if (_datagram && _memSize < length + 1) + { + reset(); + } + + if (!_datagram) + { + // Add one extra byte, to be able to fake a dummy payload of 1 byte. + _datagram = new uint8_t[length + 1]; + _memSize = length + 1; + } + memset(_datagram, 0, length + 1); + + if (length == 0) + { + _datagramLen = 0; + _rtpParsed = false; + return packetLen; + } + + // Read basic header + if (fread((unsigned short *) _datagram, 1, _kBasicHeaderLen, fp) + != (size_t)_kBasicHeaderLen) + { + reset(); + return -1; + } + _receiveTime = receiveTime; + _datagramLen = _kBasicHeaderLen; + + // Parse the basic header + webrtc::WebRtcRTPHeader tempRTPinfo; + int P, X, CC; + parseBasicHeader(&tempRTPinfo, &P, &X, &CC); + + // Check if we have to extend the header + if (X != 0 || CC != 0) + { + int newLen = _kBasicHeaderLen + CC * 4 + X * 4; + assert(_memSize >= newLen); // Read extension from file - size_t readLen = totHdrLen - newLen; - if (fread((unsigned short *) _datagram + newLen, 1, readLen, fp) - != readLen) + size_t readLen = newLen - _kBasicHeaderLen; + if (fread(&_datagram[_kBasicHeaderLen], 1, readLen, fp) != readLen) { reset(); return -1; } - _datagramLen = totHdrLen; - } - } - _datagramLen = length; + _datagramLen = newLen; - if (!_blockList.empty() && _blockList.count(payloadType()) > 0) - { - // discard this payload - return readFromFile(fp); + if (X != 0) + { + int totHdrLen = calcHeaderLength(X, CC); + assert(_memSize >= totHdrLen); + + // Read extension from file + size_t readLen = totHdrLen - newLen; + if (fread(&_datagram[newLen], 1, readLen, fp) != readLen) + { + reset(); + return -1; + } + _datagramLen = totHdrLen; + } + } + _datagramLen = length; + + if (!_blockList.empty() && _blockList.count(payloadType()) > 0) + { + readNextPacket = true; + } } _rtpParsed = false; + assert(_memSize > _datagramLen); + _payloadLen = 1; // Set the length to 1 byte. return packetLen; } @@ -191,3 +196,9 @@ int NETEQTEST_DummyRTPpacket::writeToFile(FILE *fp) } +void NETEQTEST_DummyRTPpacket::parseHeader() { + NETEQTEST_RTPpacket::parseHeader(); + // Change _payloadLen to 1 byte. The memory should always be big enough. 
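+ // A dummy packet keeps only the RTP header, so a one-byte stand-in payload
+ // is advertised here, presumably so that code that rejects zero-length
+ // payloads still accepts the packet while its content is ignored.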
+ assert(_memSize > _datagramLen); + _payloadLen = 1; +} diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h index 9dcece780b25..9f09c9482754 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h @@ -17,6 +17,7 @@ class NETEQTEST_DummyRTPpacket : public NETEQTEST_RTPpacket { public: virtual int readFromFile(FILE* fp) OVERRIDE; virtual int writeToFile(FILE* fp) OVERRIDE; + virtual void parseHeader() OVERRIDE; }; #endif // NETEQTEST_DUMMYRTPPACKET_H diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc index c6d3270f043b..22f18efde62c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc @@ -105,56 +105,61 @@ int NETEQTEST_RTPpacket::readFromFile(FILE *fp) uint16_t length, plen; uint32_t offset; + int packetLen; - if (fread(&length,2,1,fp)==0) - { - reset(); - return(-2); - } - length = ntohs(length); + bool readNextPacket = true; + while (readNextPacket) { + readNextPacket = false; + if (fread(&length,2,1,fp)==0) + { + reset(); + return(-2); + } + length = ntohs(length); - if (fread(&plen,2,1,fp)==0) - { - reset(); - return(-1); - } - int packetLen = ntohs(plen); + if (fread(&plen,2,1,fp)==0) + { + reset(); + return(-1); + } + packetLen = ntohs(plen); - if (fread(&offset,4,1,fp)==0) - { - reset(); - return(-1); - } - uint32_t receiveTime = ntohl(offset); // store in local variable until we have passed the reset below + if (fread(&offset,4,1,fp)==0) + { + reset(); + return(-1); + } + // store in local variable until we have passed the reset below + uint32_t receiveTime = ntohl(offset); - // Use length here because a plen of 0 specifies rtcp - length = (uint16_t) (length - _kRDHeaderLen); + // Use length here because a plen of 0 specifies rtcp + length = (uint16_t) (length - _kRDHeaderLen); - // check buffer size - if (_datagram && _memSize < length) - { - reset(); - } + // check buffer size + if (_datagram && _memSize < length) + { + reset(); + } - if (!_datagram) - { - _datagram = new uint8_t[length]; - _memSize = length; - } + if (!_datagram) + { + _datagram = new uint8_t[length]; + _memSize = length; + } - if (fread((unsigned short *) _datagram,1,length,fp) != length) - { - reset(); - return(-1); - } + if (fread((unsigned short *) _datagram,1,length,fp) != length) + { + reset(); + return(-1); + } - _datagramLen = length; - _receiveTime = receiveTime; + _datagramLen = length; + _receiveTime = receiveTime; - if (!_blockList.empty() && _blockList.count(payloadType()) > 0) - { - // discard this payload - return(readFromFile(fp)); + if (!_blockList.empty() && _blockList.count(payloadType()) > 0) + { + readNextPacket = true; + } } _rtpParsed = false; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h index 684a1607b2eb..8a31274aba1a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h @@ -37,7 +37,7 @@ public: virtual int 
writeToFile(FILE *fp); void blockPT(uint8_t pt); //int16_t payloadType(); - void parseHeader(); + virtual void parseHeader(); void parseHeader(webrtc::WebRtcRTPHeader* rtp_header); const webrtc::WebRtcRTPHeader* RTPinfo() const; uint8_t * datagram() const; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc index 7368b0265dc6..bc806091b0a1 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc @@ -1618,7 +1618,7 @@ int NetEQTest_encode(int coder, int16_t *indata, int frameLen, unsigned char * e #ifdef CODEC_G722 else if (coder==webrtc::kDecoderG722) { /*g722 */ cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (int16_t*)encoded); - cdlen=frameLen>>1; + assert(cdlen == frameLen>>1); } #endif #ifdef CODEC_ILBC diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc index 79af181fbe43..eeb4c90140cb 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc @@ -64,7 +64,9 @@ int main(int argc, char* argv[]) unsigned int dat_len, rtp_len, Npack, k; arr_time *time_vec; char firstline[FIRSTLINELEN]; - unsigned char *rtp_vec = NULL, **packet_ptr, *temp_packet; + unsigned char* rtp_vec = NULL; + unsigned char** packet_ptr = NULL; + unsigned char* temp_packet = NULL; const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2; uint16_t len; uint32_t *offset; @@ -113,6 +115,11 @@ int main(int argc, char* argv[]) dat_len++; } + if (dat_len == 0) { + fprintf(stderr, "Error: dat_file is empty, no arrival time is given.\n"); + goto closing; + } + qsort(time_vec,dat_len,sizeof(arr_time),compare_arr_time); @@ -146,6 +153,11 @@ int main(int argc, char* argv[]) len=(uint16_t) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet } + if (Npack == 0) { + fprintf(stderr, "Error: No RTP packet found.\n"); + goto closing; + } + packet_ptr = (unsigned char **) malloc(Npack*sizeof(unsigned char*)); packet_ptr[0]=rtp_vec; @@ -182,7 +194,10 @@ int main(int argc, char* argv[]) closing: free(time_vec); free(rtp_vec); - fclose(in_file); + if (packet_ptr != NULL) { + free(packet_ptr); + } + fclose(in_file); fclose(dat_file); fclose(out_file); diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc index ba3a08ee0aec..15ffdf6a511e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc @@ -68,18 +68,20 @@ int main(int argc, char* argv[]) uint32_t ATdiff = 0; if (argc > 4) { - if (argv[4] >= 0) - SNdiff = atoi(argv[4]) - packet.sequenceNumber(); + int startSN = atoi(argv[4]); + if (startSN >= 0) + SNdiff = startSN - packet.sequenceNumber(); if (argc > 5) { - if (argv[5] >= 0) - ATdiff = atoi(argv[5]) - packet.time(); + int startTS = atoi(argv[5]); + if (startTS >= 0) + ATdiff = startTS - packet.time(); } } while (packLen >= 0) { - + packet.setTimeStamp(packet.timeStamp() + TSdiff); packet.setSequenceNumber(packet.sequenceNumber() + SNdiff); packet.setTime(packet.time() + ATdiff); diff --git 
a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_performance_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_performance_unittest.cc new file mode 100644 index 000000000000..f669742ce0e6 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_performance_unittest.cc @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.h" +#include "webrtc/test/testsupport/perf_test.h" +#include "webrtc/typedefs.h" + +// Runs a test with 10% packet losses and 10% clock drift, to exercise +// both loss concealment and time-stretching code. +TEST(NetEqPerformanceTest, Run) { + const int kSimulationTimeMs = 10000000; + const int kLossPeriod = 10; // Drop every 10th packet. + const double kDriftFactor = 0.1; + int64_t runtime = webrtc::test::NetEqPerformanceTest::Run( + kSimulationTimeMs, kLossPeriod, kDriftFactor); + ASSERT_GT(runtime, 0); + webrtc::test::PrintResult( + "neteq_performance", "", "10_pl_10_drift", runtime, "ms", true); +} + +// Runs a test with neither packet losses nor clock drift, to put +// emphasis on the "good-weather" code path, which is presumably much +// more lightweight. +TEST(NetEqPerformanceTest, RunClean) { + const int kSimulationTimeMs = 10000000; + const int kLossPeriod = 0; // No losses. + const double kDriftFactor = 0.0; // No clock drift. + int64_t runtime = webrtc::test::NetEqPerformanceTest::Run( + kSimulationTimeMs, kLossPeriod, kDriftFactor); + ASSERT_GT(runtime, 0); + webrtc::test::PrintResult( + "neteq_performance", "", "0_pl_0_drift", runtime, "ms", true); +} diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_speed_test.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_speed_test.cc index 34f0de44e311..cecd48b7012b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_speed_test.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/test/neteq_speed_test.cc @@ -13,18 +13,9 @@ #include #include "gflags/gflags.h" -#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h" -#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" -#include "webrtc/modules/audio_coding/neteq4/tools/audio_loop.h" -#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h" -#include "webrtc/test/testsupport/fileutils.h" +#include "webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.h" #include "webrtc/typedefs.h" -using webrtc::NetEq; -using webrtc::test::AudioLoop; -using webrtc::test::RtpGenerator; -using webrtc::WebRtcRTPHeader; - // Flag validators. static bool ValidateRuntime(const char* flagname, int value) { if (value > 0) // Value is ok. 
@@ -59,15 +50,6 @@ static const bool drift_dummy = google::RegisterFlagValidator(&FLAGS_drift, &ValidateDriftfactor); int main(int argc, char* argv[]) { - static const int kMaxChannels = 1; - static const int kMaxSamplesPerMs = 48000 / 1000; - static const int kOutputBlockSizeMs = 10; - const std::string kInputFileName = - webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - const int kSampRateHz = 32000; - const webrtc::NetEqDecoder kDecoderType = webrtc::kDecoderPCM16Bswb32kHz; - const int kPayloadType = 95; - std::string program_name = argv[0]; std::string usage = "Tool for measuring the speed of NetEq.\n" "Usage: " + program_name + " [options]\n\n" @@ -84,101 +66,15 @@ int main(int argc, char* argv[]) { return 0; } - // Initialize NetEq instance. - NetEq* neteq = NetEq::Create(kSampRateHz); - // Register decoder in |neteq|. - int error; - error = neteq->RegisterPayloadType(kDecoderType, kPayloadType); - if (error) { - std::cerr << "Cannot register decoder." << std::endl; - exit(1); - } - - // Set up AudioLoop object. - AudioLoop audio_loop; - const size_t kMaxLoopLengthSamples = kSampRateHz * 10; // 10 second loop. - const size_t kInputBlockSizeSamples = 60 * kSampRateHz / 1000; // 60 ms. - if (!audio_loop.Init(kInputFileName, kMaxLoopLengthSamples, - kInputBlockSizeSamples)) { - std::cerr << "Cannot initialize AudioLoop object." << std::endl; - exit(1); - } - - int32_t time_now_ms = 0; - - // Get first input packet. - WebRtcRTPHeader rtp_header; - RtpGenerator rtp_gen(kSampRateHz / 1000); - // Start with positive drift first half of simulation. - double drift_factor = 0.1; - rtp_gen.set_drift_factor(drift_factor); - bool drift_flipped = false; - int32_t packet_input_time_ms = - rtp_gen.GetRtpHeader(kPayloadType, kInputBlockSizeSamples, &rtp_header); - const int16_t* input_samples = audio_loop.GetNextBlock(); - if (!input_samples) exit(1); - uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)]; - int payload_len = WebRtcPcm16b_Encode(const_cast(input_samples), - kInputBlockSizeSamples, - input_payload); - assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t)); - - // Main loop. - while (time_now_ms < FLAGS_runtime_ms) { - while (packet_input_time_ms <= time_now_ms) { - // Drop every N packets, where N = FLAGS_lossrate. - bool lost = false; - if (FLAGS_lossrate > 0) { - lost = ((rtp_header.header.sequenceNumber - 1) % FLAGS_lossrate) == 0; - } - if (!lost) { - // Insert packet. - int error = neteq->InsertPacket( - rtp_header, input_payload, payload_len, - packet_input_time_ms * kSampRateHz / 1000); - if (error != NetEq::kOK) { - std::cerr << "InsertPacket returned error code " << - neteq->LastError() << std::endl; - exit(1); - } - } - - // Get next packet. - packet_input_time_ms = rtp_gen.GetRtpHeader(kPayloadType, - kInputBlockSizeSamples, - &rtp_header); - input_samples = audio_loop.GetNextBlock(); - if (!input_samples) exit(1); - payload_len = WebRtcPcm16b_Encode(const_cast(input_samples), - kInputBlockSizeSamples, - input_payload); - assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t)); - } - - // Get output audio, but don't do anything with it. 
- static const int kOutDataLen = kOutputBlockSizeMs * kMaxSamplesPerMs * - kMaxChannels; - int16_t out_data[kOutDataLen]; - int num_channels; - int samples_per_channel; - int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel, - &num_channels, NULL); - if (error != NetEq::kOK) { - std::cerr << "GetAudio returned error code " << - neteq->LastError() << std::endl; - exit(1); - } - assert(samples_per_channel == kSampRateHz * 10 / 1000); - - time_now_ms += kOutputBlockSizeMs; - if (time_now_ms >= FLAGS_runtime_ms / 2 && !drift_flipped) { - // Apply negative drift second half of simulation. - rtp_gen.set_drift_factor(-drift_factor); - drift_flipped = true; - } + int64_t result = + webrtc::test::NetEqPerformanceTest::Run(FLAGS_runtime_ms, FLAGS_lossrate, + FLAGS_drift); + if (result <= 0) { + std::cout << "There was an error" << std::endl; + return -1; } std::cout << "Simulation done" << std::endl; - delete neteq; + std::cout << "Runtime = " << result << " ms" << std::endl; return 0; }
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.cc
index a2486987b383..5b6b3ba96665 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.cc
@@ -22,7 +22,7 @@ namespace webrtc { TimeStretch::ReturnCodes TimeStretch::Process( const int16_t* input, size_t input_len, - AudioMultiVector<int16_t>* output, + AudioMultiVector* output, int16_t* length_change_samples) { // Pre-calculate common multiplication with |fs_mult_|.
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.h
index 4741da23912a..f0f58b83ad93 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch.h
@@ -58,7 +58,7 @@ class TimeStretch { // PreemptiveExpand. ReturnCodes Process(const int16_t* input, size_t input_len, - AudioMultiVector<int16_t>* output, + AudioMultiVector* output, int16_t* length_change_samples); protected: @@ -75,7 +75,7 @@ class TimeStretch { virtual ReturnCodes CheckCriteriaAndStretch( const int16_t* input, size_t input_length, size_t peak_index, int16_t best_correlation, bool active_speech, - AudioMultiVector<int16_t>* output) const = 0; + AudioMultiVector* output) const = 0; static const int kCorrelationLen = 50; static const int kLogCorrelationLen = 6; // >= log2(kCorrelationLen).
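[Editor's note] The hunk above moves the measurement logic out of neteq_speed_test.cc and into NetEqPerformanceTest::Run, which brackets the simulation loop with clock reads and reports the elapsed wall-clock time. Below is a minimal standalone sketch of that timing pattern; it is not WebRTC code: SimulateOneBlock is a hypothetical stand-in for one InsertPacket/GetAudio cycle, and std::chrono stands in for webrtc::Clock.

#include <chrono>
#include <cstdint>
#include <iostream>

// Hypothetical stand-in for one packet-insert + audio-get cycle.
static void SimulateOneBlock() {}

// Brackets the simulated-time loop with clock reads and returns the
// elapsed wall-clock time in ms, as NetEqPerformanceTest::Run does.
static int64_t RunTimedSimulation(int runtime_ms, int block_size_ms) {
  const auto start = std::chrono::steady_clock::now();
  for (int t = 0; t < runtime_ms; t += block_size_ms) {
    SimulateOneBlock();
  }
  const auto end = std::chrono::steady_clock::now();
  return std::chrono::duration_cast<std::chrono::milliseconds>(end - start)
      .count();
}

int main() {
  // 10 s of simulated audio consumed in 10 ms output blocks.
  std::cout << "Runtime = " << RunTimedSimulation(10000, 10) << " ms\n";
  return 0;
}

Note that the simulated duration (runtime_ms) and the measured runtime are different quantities: the test is useful precisely because NetEq normally processes audio much faster than real time.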
diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc index cf8131f3a06f..188c18b71c33 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc @@ -19,11 +19,29 @@ namespace webrtc { TEST(TimeStretch, CreateAndDestroy) { - int sample_rate = 8000; - size_t num_channels = 1; - BackgroundNoise bgn(num_channels); - Accelerate accelerate(sample_rate, num_channels, bgn); - PreemptiveExpand preemptive_expand(sample_rate, num_channels, bgn); + const int kSampleRate = 8000; + const size_t kNumChannels = 1; + BackgroundNoise bgn(kNumChannels); + Accelerate accelerate(kSampleRate, kNumChannels, bgn); + PreemptiveExpand preemptive_expand(kSampleRate, kNumChannels, bgn); +} + +TEST(TimeStretch, CreateUsingFactory) { + const int kSampleRate = 8000; + const size_t kNumChannels = 1; + BackgroundNoise bgn(kNumChannels); + + AccelerateFactory accelerate_factory; + Accelerate* accelerate = + accelerate_factory.Create(kSampleRate, kNumChannels, bgn); + EXPECT_TRUE(accelerate != NULL); + delete accelerate; + + PreemptiveExpandFactory preemptive_expand_factory; + PreemptiveExpand* preemptive_expand = + preemptive_expand_factory.Create(kSampleRate, kNumChannels, bgn); + EXPECT_TRUE(preemptive_expand != NULL); + delete preemptive_expand; } // TODO(hlundin): Write more tests. diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc index 423edeeb4a08..b2b5b40a3a61 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc @@ -85,7 +85,7 @@ uint32_t TimestampScaler::ToInternal(uint32_t external_timestamp, assert(denominator_ > 0); // Should not be possible. external_ref_ = external_timestamp; internal_ref_ += (external_diff * numerator_) / denominator_; - NETEQ_LOG_VERBOSE << "Converting timestamp: " << external_timestamp << + LOG(LS_VERBOSE) << "Converting timestamp: " << external_timestamp << " -> " << internal_ref_; return internal_ref_; } else { diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.cc new file mode 100644 index 000000000000..203ea040cc8b --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.cc @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.h" + +#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h" +#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" +#include "webrtc/modules/audio_coding/neteq4/tools/audio_loop.h" +#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h" +#include "webrtc/system_wrappers/interface/clock.h" +#include "webrtc/test/testsupport/fileutils.h" +#include "webrtc/typedefs.h" + +using webrtc::NetEq; +using webrtc::test::AudioLoop; +using webrtc::test::RtpGenerator; +using webrtc::WebRtcRTPHeader; + +namespace webrtc { +namespace test { + +int64_t NetEqPerformanceTest::Run(int runtime_ms, + int lossrate, + double drift_factor) { + const std::string kInputFileName = + webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); + const int kSampRateHz = 32000; + const webrtc::NetEqDecoder kDecoderType = webrtc::kDecoderPCM16Bswb32kHz; + const int kPayloadType = 95; + + // Initialize NetEq instance. + NetEq* neteq = NetEq::Create(kSampRateHz); + // Register decoder in |neteq|. + if (neteq->RegisterPayloadType(kDecoderType, kPayloadType) != 0) + return -1; + + // Set up AudioLoop object. + AudioLoop audio_loop; + const size_t kMaxLoopLengthSamples = kSampRateHz * 10; // 10 second loop. + const size_t kInputBlockSizeSamples = 60 * kSampRateHz / 1000; // 60 ms. + if (!audio_loop.Init(kInputFileName, kMaxLoopLengthSamples, + kInputBlockSizeSamples)) + return -1; + + int32_t time_now_ms = 0; + + // Get first input packet. + WebRtcRTPHeader rtp_header; + RtpGenerator rtp_gen(kSampRateHz / 1000); + // Start with positive drift first half of simulation. + rtp_gen.set_drift_factor(drift_factor); + bool drift_flipped = false; + int32_t packet_input_time_ms = + rtp_gen.GetRtpHeader(kPayloadType, kInputBlockSizeSamples, &rtp_header); + const int16_t* input_samples = audio_loop.GetNextBlock(); + if (!input_samples) exit(1); + uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)]; + int payload_len = WebRtcPcm16b_Encode(const_cast(input_samples), + kInputBlockSizeSamples, + input_payload); + assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t)); + + // Main loop. + webrtc::Clock* clock = webrtc::Clock::GetRealTimeClock(); + int64_t start_time_ms = clock->TimeInMilliseconds(); + while (time_now_ms < runtime_ms) { + while (packet_input_time_ms <= time_now_ms) { + // Drop every N packets, where N = FLAGS_lossrate. + bool lost = false; + if (lossrate > 0) { + lost = ((rtp_header.header.sequenceNumber - 1) % lossrate) == 0; + } + if (!lost) { + // Insert packet. + int error = neteq->InsertPacket( + rtp_header, input_payload, payload_len, + packet_input_time_ms * kSampRateHz / 1000); + if (error != NetEq::kOK) + return -1; + } + + // Get next packet. + packet_input_time_ms = rtp_gen.GetRtpHeader(kPayloadType, + kInputBlockSizeSamples, + &rtp_header); + input_samples = audio_loop.GetNextBlock(); + if (!input_samples) return -1; + payload_len = WebRtcPcm16b_Encode(const_cast(input_samples), + kInputBlockSizeSamples, + input_payload); + assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t)); + } + + // Get output audio, but don't do anything with it. 
+ static const int kMaxChannels = 1; + static const int kMaxSamplesPerMs = 48000 / 1000; + static const int kOutputBlockSizeMs = 10; + static const int kOutDataLen = kOutputBlockSizeMs * kMaxSamplesPerMs * + kMaxChannels; + int16_t out_data[kOutDataLen]; + int num_channels; + int samples_per_channel; + int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel, + &num_channels, NULL); + if (error != NetEq::kOK) + return -1; + + assert(samples_per_channel == kSampRateHz * 10 / 1000); + + time_now_ms += kOutputBlockSizeMs; + if (time_now_ms >= runtime_ms / 2 && !drift_flipped) { + // Apply negative drift second half of simulation. + rtp_gen.set_drift_factor(-drift_factor); + drift_flipped = true; + } + } + int64_t end_time_ms = clock->TimeInMilliseconds(); + delete neteq; + return end_time_ms - start_time_ms; +} + +} // namespace test +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.h new file mode 100644 index 000000000000..1b205c03c10c --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_performance_test.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_NETEQ_PERFORMANCE_TEST_H_ +#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_NETEQ_PERFORMANCE_TEST_H_ + +#include "webrtc/typedefs.h" + +namespace webrtc { +namespace test { + +class NetEqPerformanceTest { + public: + // Runs a performance test with parameters as follows: + // |runtime_ms|: the simulation time, i.e., the duration of the audio data. + // |lossrate|: drop one out of |lossrate| packets, e.g., one out of 10. + // |drift_factor|: clock drift in [0, 1]. + // Returns the runtime in ms. + static int64_t Run(int runtime_ms, int lossrate, double drift_factor); +}; + +} // namespace test +} // namespace webrtc + +#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_NETEQ_PERFORMANCE_TEST_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc index f0ca51f2f702..d2c4a5cb41ec 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc @@ -8,6 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. */ +// TODO(hlundin): The functionality in this file should be moved into one or +// several classes. 
+ #include #include @@ -16,10 +19,13 @@ #include #include "google/gflags.h" +#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h" #include "webrtc/modules/audio_coding/neteq4/interface/neteq.h" #include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h" #include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h" +#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h" #include "webrtc/modules/interface/module_common_types.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" #include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/test/testsupport/fileutils.h" #include "webrtc/typedefs.h" @@ -88,11 +94,23 @@ DEFINE_bool(codec_map, false, "Prints the mapping between RTP payload type and " "codec"); DEFINE_bool(dummy_rtp, false, "The input file contains ""dummy"" RTP data, " "i.e., only headers"); +DEFINE_string(replacement_audio_file, "", + "A PCM file that will be used to populate ""dummy"" RTP packets"); // Declaring helper functions (defined further down in this file). std::string CodecName(webrtc::NetEqDecoder codec); void RegisterPayloadTypes(NetEq* neteq); void PrintCodecMapping(); +size_t ReplacePayload(webrtc::test::InputAudioFile* replacement_audio_file, + webrtc::scoped_ptr* replacement_audio, + webrtc::scoped_ptr* payload, + size_t* payload_mem_size_bytes, + size_t* frame_size_samples, + WebRtcRTPHeader* rtp_header, + NETEQTEST_RTPpacket* next_rtp); +int CodecSampleRate(uint8_t payload_type); +int CodecTimestampRate(uint8_t payload_type); +bool IsComfortNosie(uint8_t payload_type); int main(int argc, char* argv[]) { static const int kMaxChannels = 5; @@ -135,6 +153,15 @@ int main(int argc, char* argv[]) { } std::cout << "Output file: " << argv[2] << std::endl; + // Check if a replacement audio file was provided, and if so, open it. + bool replace_payload = false; + webrtc::scoped_ptr replacement_audio_file; + if (!FLAGS_replacement_audio_file.empty()) { + replacement_audio_file.reset( + new webrtc::test::InputAudioFile(FLAGS_replacement_audio_file)); + replace_payload = true; + } + // Read RTP file header. if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0) { std::cerr << "Wrong format in RTP file" << std::endl; @@ -153,17 +180,41 @@ int main(int argc, char* argv[]) { RegisterPayloadTypes(neteq); // Read first packet. - NETEQTEST_RTPpacket *rtp; + NETEQTEST_RTPpacket* rtp; + NETEQTEST_RTPpacket* next_rtp = NULL; if (!FLAGS_dummy_rtp) { rtp = new NETEQTEST_RTPpacket(); + if (replace_payload) { + next_rtp = new NETEQTEST_RTPpacket(); + } } else { rtp = new NETEQTEST_DummyRTPpacket(); + if (replace_payload) { + next_rtp = new NETEQTEST_DummyRTPpacket(); + } } rtp->readFromFile(in_file); - if (!rtp) { + if (rtp->dataLen() < 0) { std::cout << "Warning: RTP file is empty" << std::endl; } + // Set up variables for audio replacement if needed. + size_t input_frame_size_timestamps = 0; + webrtc::scoped_ptr replacement_audio; + webrtc::scoped_ptr payload; + size_t payload_mem_size_bytes = 0; + if (replace_payload) { + // Initially assume that the frame size is 30 ms at the initial sample rate. + // This value will be replaced with the correct one as soon as two + // consecutive packets are found. 
+ input_frame_size_timestamps = 30 * sample_rate_hz / 1000; + replacement_audio.reset(new int16_t[input_frame_size_timestamps]); + payload_mem_size_bytes = 2 * input_frame_size_timestamps; + payload.reset(new uint8_t[payload_mem_size_bytes]); + assert(next_rtp); + next_rtp->readFromFile(in_file); + } + // This is the main simulation loop. int time_now_ms = rtp->time(); // Start immediately with the first packet. int next_input_time_ms = rtp->time(); @@ -181,9 +232,21 @@ int main(int argc, char* argv[]) { // Parse RTP header. WebRtcRTPHeader rtp_header; rtp->parseHeader(&rtp_header); - int error = neteq->InsertPacket(rtp_header, rtp->payload(), - rtp->payloadLen(), - rtp->time() * sample_rate_hz / 1000); + uint8_t* payload_ptr = rtp->payload(); + size_t payload_len = rtp->payloadLen(); + if (replace_payload) { + payload_len = ReplacePayload(replacement_audio_file.get(), + &replacement_audio, + &payload, + &payload_mem_size_bytes, + &input_frame_size_timestamps, + &rtp_header, + next_rtp); + payload_ptr = payload.get(); + } + int error = neteq->InsertPacket(rtp_header, payload_ptr, + static_cast(payload_len), + rtp->time() * sample_rate_hz / 1000); if (error != NetEq::kOK) { std::cerr << "InsertPacket returned error code " << neteq->LastError() << std::endl; @@ -191,6 +254,13 @@ int main(int argc, char* argv[]) { } // Get next packet from file. rtp->readFromFile(in_file); + if (replace_payload) { + // At this point |rtp| contains the packet *after* |next_rtp|. + // Swap RTP packet objects between |rtp| and |next_rtp|. + NETEQTEST_RTPpacket* temp_rtp = rtp; + rtp = next_rtp; + next_rtp = temp_rtp; + } next_input_time_ms = rtp->time(); } @@ -212,6 +282,7 @@ int main(int argc, char* argv[]) { } // Write to file. + // TODO(hlundin): Make writing to file optional. size_t write_len = samples_per_channel * num_channels; if (fwrite(out_data, sizeof(out_data[0]), write_len, out_file) != write_len) { @@ -229,6 +300,8 @@ int main(int argc, char* argv[]) { fclose(in_file); fclose(out_file); + delete rtp; + delete next_rtp; delete neteq; webrtc::Trace::ReturnTrace(); return 0; @@ -421,3 +494,133 @@ void PrintCodecMapping() { std::cout << CodecName(webrtc::kDecoderCNGswb48kHz).c_str() << ": " << FLAGS_cn_swb48 << std::endl; } + +size_t ReplacePayload(webrtc::test::InputAudioFile* replacement_audio_file, + webrtc::scoped_ptr* replacement_audio, + webrtc::scoped_ptr* payload, + size_t* payload_mem_size_bytes, + size_t* frame_size_samples, + WebRtcRTPHeader* rtp_header, + NETEQTEST_RTPpacket* next_rtp) { + size_t payload_len = 0; + // Check for CNG. + if (IsComfortNosie(rtp_header->header.payloadType)) { + // If CNG, simply insert a zero-energy one-byte payload. + if (*payload_mem_size_bytes < 1) { + (*payload).reset(new uint8_t[1]); + *payload_mem_size_bytes = 1; + } + (*payload)[0] = 127; // Max attenuation of CNG. + payload_len = 1; + } else { + if (next_rtp->payloadLen() > 0) { + // Check if payload length has changed. + if (next_rtp->sequenceNumber() == rtp_header->header.sequenceNumber + 1) { + if (*frame_size_samples != + next_rtp->timeStamp() - rtp_header->header.timestamp) { + *frame_size_samples = + next_rtp->timeStamp() - rtp_header->header.timestamp; + (*replacement_audio).reset( + new int16_t[*frame_size_samples]); + *payload_mem_size_bytes = 2 * *frame_size_samples; + (*payload).reset(new uint8_t[*payload_mem_size_bytes]); + } + } + } + // Get new speech. 
+ assert((*replacement_audio).get()); + if (CodecTimestampRate(rtp_header->header.payloadType) != + CodecSampleRate(rtp_header->header.payloadType) || + rtp_header->header.payloadType == FLAGS_red || + rtp_header->header.payloadType == FLAGS_avt) { + // Some codecs have different sample and timestamp rates. And neither + // RED nor DTMF is supported for replacement. + std::cerr << "Codec not supported for audio replacement." << + std::endl; + webrtc::Trace::ReturnTrace(); + exit(1); + } + assert(*frame_size_samples > 0); + if (!replacement_audio_file->Read(*frame_size_samples, + (*replacement_audio).get())) { + std::cerr << "Could not read replacement audio file." << std::endl; + webrtc::Trace::ReturnTrace(); + exit(1); + } + // Encode it as PCM16. + assert((*payload).get()); + payload_len = WebRtcPcm16b_Encode((*replacement_audio).get(), + static_cast<int16_t>(*frame_size_samples), + (*payload).get()); + assert(payload_len == 2 * *frame_size_samples); + // Change payload type to PCM16. + switch (CodecSampleRate(rtp_header->header.payloadType)) { + case 8000: + rtp_header->header.payloadType = FLAGS_pcm16b; + break; + case 16000: + rtp_header->header.payloadType = FLAGS_pcm16b_wb; + break; + case 32000: + rtp_header->header.payloadType = FLAGS_pcm16b_swb32; + break; + case 48000: + rtp_header->header.payloadType = FLAGS_pcm16b_swb48; + break; + default: + std::cerr << "Payload type " << + static_cast<int>(rtp_header->header.payloadType) << + " not supported or unknown." << std::endl; + webrtc::Trace::ReturnTrace(); + exit(1); + assert(false); + } + } + return payload_len; +} + +int CodecSampleRate(uint8_t payload_type) { + if (payload_type == FLAGS_pcmu || + payload_type == FLAGS_pcma || + payload_type == FLAGS_ilbc || + payload_type == FLAGS_pcm16b || + payload_type == FLAGS_cn_nb) { + return 8000; + } else if (payload_type == FLAGS_isac || + payload_type == FLAGS_pcm16b_wb || + payload_type == FLAGS_g722 || + payload_type == FLAGS_cn_wb) { + return 16000; + } else if (payload_type == FLAGS_isac_swb || + payload_type == FLAGS_pcm16b_swb32 || + payload_type == FLAGS_cn_swb32) { + return 32000; + } else if (payload_type == FLAGS_pcm16b_swb48 || + payload_type == FLAGS_cn_swb48) { + return 48000; + } else if (payload_type == FLAGS_avt || + payload_type == FLAGS_red) { + return 0; + } else { + return -1; + } +} + +int CodecTimestampRate(uint8_t payload_type) { + if (payload_type == FLAGS_g722) { + return 8000; + } else { + return CodecSampleRate(payload_type); + } +} + +bool IsComfortNosie(uint8_t payload_type) { + if (payload_type == FLAGS_cn_nb || + payload_type == FLAGS_cn_wb || + payload_type == FLAGS_cn_swb32 || + payload_type == FLAGS_cn_swb48) { + return true; + } else { + return false; + } +}
diff --git a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
index 352537d6ef70..2969ecebe662 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
@@ -57,7 +57,7 @@ public: // Add/remove participants as candidates for mixing. virtual int32_t SetMixabilityStatus(MixerParticipant& participant, - const bool mixable) = 0; + bool mixable) = 0; // mixable is set to true if a participant is a candidate for mixing.
virtual int32_t MixabilityStatus(MixerParticipant& participant, bool& mixable) = 0; diff --git a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc index da16814d1825..f3883c0b58d5 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc @@ -19,6 +19,13 @@ namespace webrtc { namespace { +struct ParticipantFramePair { + MixerParticipant* participant; + AudioFrame* audioFrame; +}; + +typedef std::list ParticipantFramePairList; + // Mix |frame| into |mixed_frame|, with saturation protection and upmixing. // These effects are applied to |frame| itself prior to mixing. Assumes that // |mixed_frame| always has at least as many channels as |frame|. Supports @@ -40,20 +47,18 @@ void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame) { } // Return the max number of channels from a |list| composed of AudioFrames. -int MaxNumChannels(const ListWrapper& list) { - ListItem* item = list.First(); +int MaxNumChannels(const AudioFrameList* list) { int max_num_channels = 1; - while (item) { - AudioFrame* frame = static_cast(item->GetItem()); - max_num_channels = std::max(max_num_channels, frame->num_channels_); - item = list.Next(item); + for (AudioFrameList::const_iterator iter = list->begin(); + iter != list->end(); + ++iter) { + max_num_channels = std::max(max_num_channels, (*iter)->num_channels_); } return max_num_channels; } void SetParticipantStatistics(ParticipantStatistics* stats, - const AudioFrame& frame) -{ + const AudioFrame& frame) { stats->participant = frame.id_; stats->level = 0; // TODO(andrew): to what should this be set? } @@ -61,58 +66,47 @@ void SetParticipantStatistics(ParticipantStatistics* stats, } // namespace MixerParticipant::MixerParticipant() - : _mixHistory(new MixHistory()) -{ + : _mixHistory(new MixHistory()) { } -MixerParticipant::~MixerParticipant() -{ +MixerParticipant::~MixerParticipant() { delete _mixHistory; } -int32_t MixerParticipant::IsMixed(bool& mixed) const -{ +int32_t MixerParticipant::IsMixed(bool& mixed) const { return _mixHistory->IsMixed(mixed); } MixHistory::MixHistory() - : _isMixed(0) -{ + : _isMixed(0) { } -MixHistory::~MixHistory() -{ +MixHistory::~MixHistory() { } -int32_t MixHistory::IsMixed(bool& mixed) const -{ +int32_t MixHistory::IsMixed(bool& mixed) const { mixed = _isMixed; return 0; } -int32_t MixHistory::WasMixed(bool& wasMixed) const -{ +int32_t MixHistory::WasMixed(bool& wasMixed) const { // Was mixed is the same as is mixed depending on perspective. This function // is for the perspective of AudioConferenceMixerImpl. 
return IsMixed(wasMixed); } -int32_t MixHistory::SetIsMixed(const bool mixed) -{ +int32_t MixHistory::SetIsMixed(const bool mixed) { _isMixed = mixed; return 0; } -void MixHistory::ResetMixedStatus() -{ +void MixHistory::ResetMixedStatus() { _isMixed = false; } -AudioConferenceMixer* AudioConferenceMixer::Create(int id) -{ +AudioConferenceMixer* AudioConferenceMixer::Create(int id) { AudioConferenceMixerImpl* mixer = new AudioConferenceMixerImpl(id); - if(!mixer->Init()) - { + if(!mixer->Init()) { delete mixer; return NULL; } @@ -124,8 +118,6 @@ AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id) _scratchMixedParticipants(), _scratchVadPositiveParticipantsAmount(0), _scratchVadPositiveParticipants(), - _crit(NULL), - _cbCrit(NULL), _id(id), _minimumMixingFreq(kLowestPossible), _mixReceiver(NULL), @@ -142,12 +134,9 @@ AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id) _timeStamp(0), _timeScheduler(kProcessPeriodicityInMs), _mixedAudioLevel(), - _processCalls(0), - _limiter(NULL) -{} + _processCalls(0) {} -bool AudioConferenceMixerImpl::Init() -{ +bool AudioConferenceMixerImpl::Init() { _crit.reset(CriticalSectionWrapper::CreateCriticalSection()); if (_crit.get() == NULL) return false; @@ -156,8 +145,10 @@ bool AudioConferenceMixerImpl::Init() if(_cbCrit.get() == NULL) return false; - _limiter.reset(AudioProcessing::Create(_id)); - if(_limiter.get() == NULL) + Config config; + config.Set(new ExperimentalAgc(false)); + _limiter.reset(AudioProcessing::Create(config)); + if(!_limiter.get()) return false; MemoryPool::CreateMemoryPool(_audioFramePool, @@ -168,10 +159,6 @@ bool AudioConferenceMixerImpl::Init() if(SetOutputFrequency(kDefaultFrequency) == -1) return false; - // Assume mono. - if (!SetNumLimiterChannels(1)) - return false; - if(_limiter->gain_control()->set_mode(GainControl::kFixedDigital) != _limiter->kNoError) return false; @@ -195,25 +182,21 @@ bool AudioConferenceMixerImpl::Init() return true; } -AudioConferenceMixerImpl::~AudioConferenceMixerImpl() -{ +AudioConferenceMixerImpl::~AudioConferenceMixerImpl() { MemoryPool::DeleteMemoryPool(_audioFramePool); assert(_audioFramePool == NULL); } -int32_t AudioConferenceMixerImpl::ChangeUniqueId(const int32_t id) -{ +int32_t AudioConferenceMixerImpl::ChangeUniqueId(const int32_t id) { _id = id; return 0; } // Process should be called every kProcessPeriodicityInMs ms -int32_t AudioConferenceMixerImpl::TimeUntilNextProcess() -{ +int32_t AudioConferenceMixerImpl::TimeUntilNextProcess() { int32_t timeUntilNextProcess = 0; CriticalSectionScoped cs(_crit.get()); - if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0) - { + if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0) { WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, "failed in TimeToNextUpdate() call"); // Sanity check @@ -223,9 +206,8 @@ int32_t AudioConferenceMixerImpl::TimeUntilNextProcess() return timeUntilNextProcess; } -int32_t AudioConferenceMixerImpl::Process() -{ - uint32_t remainingParticipantsAllowedToMix = +int32_t AudioConferenceMixerImpl::Process() { + size_t remainingParticipantsAllowedToMix = kMaximumAmountOfMixedParticipants; { CriticalSectionScoped cs(_crit.get()); @@ -236,9 +218,9 @@ int32_t AudioConferenceMixerImpl::Process() _timeScheduler.UpdateScheduler(); } - ListWrapper mixList; - ListWrapper rampOutList; - ListWrapper additionalFramesList; + AudioFrameList mixList; + AudioFrameList rampOutList; + AudioFrameList additionalFramesList; std::map mixedParticipantsMap; { CriticalSectionScoped cs(_cbCrit.get()); @@ -249,41 
+231,34 @@ int32_t AudioConferenceMixerImpl::Process() // information. // TODO(henrike): this is probably more appropriate to do in // GetLowestMixingFrequency(). - if (lowFreq == 12000) - { + if (lowFreq == 12000) { lowFreq = 16000; } else if (lowFreq == 24000) { lowFreq = 32000; } - if(lowFreq <= 0) - { + if(lowFreq <= 0) { CriticalSectionScoped cs(_crit.get()); _processCalls--; return 0; - } else { - switch(lowFreq) - { + } else { + switch(lowFreq) { case 8000: - if(OutputFrequency() != kNbInHz) - { + if(OutputFrequency() != kNbInHz) { SetOutputFrequency(kNbInHz); } break; case 16000: - if(OutputFrequency() != kWbInHz) - { + if(OutputFrequency() != kWbInHz) { SetOutputFrequency(kWbInHz); } break; case 32000: - if(OutputFrequency() != kSwbInHz) - { + if(OutputFrequency() != kSwbInHz) { SetOutputFrequency(kSwbInHz); } break; case 48000: - if(OutputFrequency() != kFbInHz) - { + if(OutputFrequency() != kFbInHz) { SetOutputFrequency(kFbInHz); } break; @@ -296,19 +271,17 @@ int32_t AudioConferenceMixerImpl::Process() } } - UpdateToMix(mixList, rampOutList, &mixedParticipantsMap, + UpdateToMix(&mixList, &rampOutList, &mixedParticipantsMap, remainingParticipantsAllowedToMix); - GetAdditionalAudio(additionalFramesList); + GetAdditionalAudio(&additionalFramesList); UpdateMixedStatus(mixedParticipantsMap); - _scratchParticipantsToMixAmount = - static_cast(mixedParticipantsMap.size()); + _scratchParticipantsToMixAmount = mixedParticipantsMap.size(); } // Get an AudioFrame for mixing from the memory pool. AudioFrame* mixedAudio = NULL; - if(_audioFramePool->PopMemory(mixedAudio) == -1) - { + if(_audioFramePool->PopMemory(mixedAudio) == -1) { WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id, "failed PopMemory() call"); assert(false); @@ -325,12 +298,9 @@ int32_t AudioConferenceMixerImpl::Process() // with an API instead of dynamically. // Find the max channels over all mixing lists. - const int num_mixed_channels = std::max(MaxNumChannels(mixList), - std::max(MaxNumChannels(additionalFramesList), - MaxNumChannels(rampOutList))); - - if (!SetNumLimiterChannels(num_mixed_channels)) - retval = -1; + const int num_mixed_channels = std::max(MaxNumChannels(&mixList), + std::max(MaxNumChannels(&additionalFramesList), + MaxNumChannels(&rampOutList))); mixedAudio->UpdateFrame(-1, _timeStamp, NULL, 0, _outputFrequency, AudioFrame::kNormalSpeech, @@ -338,18 +308,15 @@ int32_t AudioConferenceMixerImpl::Process() _timeStamp += _sampleSize; - MixFromList(*mixedAudio, mixList); - MixAnonomouslyFromList(*mixedAudio, additionalFramesList); - MixAnonomouslyFromList(*mixedAudio, rampOutList); + MixFromList(*mixedAudio, &mixList); + MixAnonomouslyFromList(*mixedAudio, &additionalFramesList); + MixAnonomouslyFromList(*mixedAudio, &rampOutList); - if(mixedAudio->samples_per_channel_ == 0) - { + if(mixedAudio->samples_per_channel_ == 0) { // Nothing was mixed, set the audio samples to silence. mixedAudio->samples_per_channel_ = _sampleSize; mixedAudio->Mute(); - } - else - { + } else { // Only call the limiter if we have something to mix. 
if(!LimitMixedAudio(*mixedAudio)) retval = -1; @@ -358,12 +325,10 @@ int32_t AudioConferenceMixerImpl::Process() _mixedAudioLevel.ComputeLevel(mixedAudio->data_,_sampleSize); audioLevel = _mixedAudioLevel.GetLevel(); - if(_mixerStatusCb) - { + if(_mixerStatusCb) { _scratchVadPositiveParticipantsAmount = 0; - UpdateVADPositiveParticipants(mixList); - if(_amountOf10MsUntilNextCallback-- == 0) - { + UpdateVADPositiveParticipants(&mixList); + if(_amountOf10MsUntilNextCallback-- == 0) { _amountOf10MsUntilNextCallback = _amountOf10MsBetweenCallbacks; timeForMixerCallback = true; } @@ -372,8 +337,7 @@ int32_t AudioConferenceMixerImpl::Process() { CriticalSectionScoped cs(_cbCrit.get()); - if(_mixReceiver != NULL) - { + if(_mixReceiver != NULL) { const AudioFrame** dummy = NULL; _mixReceiver->NewMixedAudio( _id, @@ -383,12 +347,11 @@ int32_t AudioConferenceMixerImpl::Process() } if((_mixerStatusCallback != NULL) && - timeForMixerCallback) - { + timeForMixerCallback) { _mixerStatusCallback->MixedParticipants( _id, _scratchMixedParticipants, - _scratchParticipantsToMixAmount); + static_cast(_scratchParticipantsToMixAmount)); _mixerStatusCallback->VADPositiveParticipants( _id, @@ -400,9 +363,9 @@ int32_t AudioConferenceMixerImpl::Process() // Reclaim all outstanding memory. _audioFramePool->PushMemory(mixedAudio); - ClearAudioFrameList(mixList); - ClearAudioFrameList(rampOutList); - ClearAudioFrameList(additionalFramesList); + ClearAudioFrameList(&mixList); + ClearAudioFrameList(&rampOutList); + ClearAudioFrameList(&additionalFramesList); { CriticalSectionScoped cs(_crit.get()); _processCalls--; @@ -411,22 +374,18 @@ int32_t AudioConferenceMixerImpl::Process() } int32_t AudioConferenceMixerImpl::RegisterMixedStreamCallback( - AudioMixerOutputReceiver& mixReceiver) -{ + AudioMixerOutputReceiver& mixReceiver) { CriticalSectionScoped cs(_cbCrit.get()); - if(_mixReceiver != NULL) - { + if(_mixReceiver != NULL) { return -1; } _mixReceiver = &mixReceiver; return 0; } -int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback() -{ +int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback() { CriticalSectionScoped cs(_cbCrit.get()); - if(_mixReceiver == NULL) - { + if(_mixReceiver == NULL) { return -1; } _mixReceiver = NULL; @@ -434,16 +393,8 @@ int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback() } int32_t AudioConferenceMixerImpl::SetOutputFrequency( - const Frequency frequency) -{ + const Frequency frequency) { CriticalSectionScoped cs(_crit.get()); - const int error = _limiter->set_sample_rate_hz(frequency); - if(error != _limiter->kNoError) - { - WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, - "Error from AudioProcessing: %d", error); - return -1; - } _outputFrequency = frequency; _sampleSize = (_outputFrequency*kProcessPeriodicityInMs) / 1000; @@ -452,36 +403,15 @@ int32_t AudioConferenceMixerImpl::SetOutputFrequency( } AudioConferenceMixer::Frequency -AudioConferenceMixerImpl::OutputFrequency() const -{ +AudioConferenceMixerImpl::OutputFrequency() const { CriticalSectionScoped cs(_crit.get()); return _outputFrequency; } -bool AudioConferenceMixerImpl::SetNumLimiterChannels(int numChannels) -{ - if(_limiter->num_input_channels() != numChannels) - { - const int error = _limiter->set_num_channels(numChannels, - numChannels); - if(error != _limiter->kNoError) - { - WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, - "Error from AudioProcessing: %d", error); - assert(false); - return false; - } - } - - return true; -} - int32_t 
AudioConferenceMixerImpl::RegisterMixerStatusCallback( AudioMixerStatusReceiver& mixerStatusCallback, - const uint32_t amountOf10MsBetweenCallbacks) -{ - if(amountOf10MsBetweenCallbacks == 0) - { + const uint32_t amountOf10MsBetweenCallbacks) { + if(amountOf10MsBetweenCallbacks == 0) { WEBRTC_TRACE( kTraceWarning, kTraceAudioMixerServer, @@ -491,8 +421,7 @@ int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback( } { CriticalSectionScoped cs(_cbCrit.get()); - if(_mixerStatusCallback != NULL) - { + if(_mixerStatusCallback != NULL) { WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id, "Mixer status callback already registered"); return -1; @@ -508,8 +437,7 @@ int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback( return 0; } -int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback() -{ +int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback() { { CriticalSectionScoped cs(_crit.get()); if(!_mixerStatusCb) @@ -529,38 +457,31 @@ int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback() int32_t AudioConferenceMixerImpl::SetMixabilityStatus( MixerParticipant& participant, - const bool mixable) -{ - if (!mixable) - { + bool mixable) { + if (!mixable) { // Anonymous participants are in a separate list. Make sure that the // participant is in the _participantList if it is being mixed. SetAnonymousMixabilityStatus(participant, false); } - uint32_t numMixedParticipants; + size_t numMixedParticipants; { CriticalSectionScoped cs(_cbCrit.get()); const bool isMixed = - IsParticipantInList(participant,_participantList); + IsParticipantInList(participant, &_participantList); // API must be called with a new state. - if(!(mixable ^ isMixed)) - { + if(!(mixable ^ isMixed)) { WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id, "Mixable is aready %s", isMixed ? "ON" : "off"); return -1; } bool success = false; - if(mixable) - { - success = AddParticipantToList(participant,_participantList); + if(mixable) { + success = AddParticipantToList(participant, &_participantList); + } else { + success = RemoveParticipantFromList(participant, &_participantList); } - else - { - success = RemoveParticipantFromList(participant,_participantList); - } - if(!success) - { + if(!success) { WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, "failed to %s participant", mixable ? "add" : "remove"); @@ -568,13 +489,12 @@ int32_t AudioConferenceMixerImpl::SetMixabilityStatus( return -1; } - int numMixedNonAnonymous = _participantList.GetSize(); - if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants) - { + size_t numMixedNonAnonymous = _participantList.size(); + if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants) { numMixedNonAnonymous = kMaximumAmountOfMixedParticipants; } - numMixedParticipants = numMixedNonAnonymous + - _additionalParticipantList.GetSize(); + numMixedParticipants = + numMixedNonAnonymous + _additionalParticipantList.size(); } // A MixerParticipant was added or removed. Make sure the scratch // buffer is updated if necessary. 
@@ -586,40 +506,34 @@ int32_t AudioConferenceMixerImpl::SetMixabilityStatus( int32_t AudioConferenceMixerImpl::MixabilityStatus( MixerParticipant& participant, - bool& mixable) -{ + bool& mixable) { CriticalSectionScoped cs(_cbCrit.get()); - mixable = IsParticipantInList(participant, _participantList); + mixable = IsParticipantInList(participant, &_participantList); return 0; } int32_t AudioConferenceMixerImpl::SetAnonymousMixabilityStatus( - MixerParticipant& participant, const bool anonymous) -{ + MixerParticipant& participant, const bool anonymous) { CriticalSectionScoped cs(_cbCrit.get()); - if(IsParticipantInList(participant, _additionalParticipantList)) - { - if(anonymous) - { + if(IsParticipantInList(participant, &_additionalParticipantList)) { + if(anonymous) { return 0; } - if(!RemoveParticipantFromList(participant, _additionalParticipantList)) - { + if(!RemoveParticipantFromList(participant, + &_additionalParticipantList)) { WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, "unable to remove participant from anonymous list"); assert(false); return -1; } - return AddParticipantToList(participant, _participantList) ? 0 : -1; + return AddParticipantToList(participant, &_participantList) ? 0 : -1; } - if(!anonymous) - { + if(!anonymous) { return 0; } const bool mixable = RemoveParticipantFromList(participant, - _participantList); - if(!mixable) - { + &_participantList); + if(!mixable) { WEBRTC_TRACE( kTraceWarning, kTraceAudioMixerServer, @@ -629,39 +543,33 @@ int32_t AudioConferenceMixerImpl::SetAnonymousMixabilityStatus( // already registered. return -1; } - return AddParticipantToList(participant, _additionalParticipantList) ? + return AddParticipantToList(participant, &_additionalParticipantList) ? 0 : -1; } int32_t AudioConferenceMixerImpl::AnonymousMixabilityStatus( - MixerParticipant& participant, bool& mixable) -{ + MixerParticipant& participant, bool& mixable) { CriticalSectionScoped cs(_cbCrit.get()); mixable = IsParticipantInList(participant, - _additionalParticipantList); + &_additionalParticipantList); return 0; } int32_t AudioConferenceMixerImpl::SetMinimumMixingFrequency( - Frequency freq) -{ + Frequency freq) { // Make sure that only allowed sampling frequencies are used. Use closest // higher sampling frequency to avoid losing information. - if (static_cast(freq) == 12000) - { + if (static_cast(freq) == 12000) { freq = kWbInHz; } else if (static_cast(freq) == 24000) { freq = kSwbInHz; } if((freq == kNbInHz) || (freq == kWbInHz) || (freq == kSwbInHz) || - (freq == kLowestPossible)) - { + (freq == kLowestPossible)) { _minimumMixingFreq=freq; return 0; - } - else - { + } else { WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, "SetMinimumMixingFrequency incorrect frequency: %i",freq); assert(false); @@ -671,20 +579,17 @@ int32_t AudioConferenceMixerImpl::SetMinimumMixingFrequency( // Check all AudioFrames that are to be mixed. The highest sampling frequency // found is the lowest that can be used without losing information. -int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency() -{ +int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency() { const int participantListFrequency = - GetLowestMixingFrequencyFromList(_participantList); + GetLowestMixingFrequencyFromList(&_participantList); const int anonymousListFrequency = - GetLowestMixingFrequencyFromList(_additionalParticipantList); + GetLowestMixingFrequencyFromList(&_additionalParticipantList); const int highestFreq = (participantListFrequency > anonymousListFrequency) ? 
participantListFrequency : anonymousListFrequency; // Check if the user specified a lowest mixing frequency. - if(_minimumMixingFreq != kLowestPossible) - { - if(_minimumMixingFreq > highestFreq) - { + if(_minimumMixingFreq != kLowestPossible) { + if(_minimumMixingFreq > highestFreq) { return _minimumMixingFreq; } } @@ -692,60 +597,47 @@ int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency() } int32_t AudioConferenceMixerImpl::GetLowestMixingFrequencyFromList( - ListWrapper& mixList) -{ + MixerParticipantList* mixList) { int32_t highestFreq = 8000; - ListItem* item = mixList.First(); - while(item) - { - MixerParticipant* participant = - static_cast(item->GetItem()); - const int32_t neededFrequency = participant->NeededFrequency(_id); - if(neededFrequency > highestFreq) - { + for (MixerParticipantList::iterator iter = mixList->begin(); + iter != mixList->end(); + ++iter) { + const int32_t neededFrequency = (*iter)->NeededFrequency(_id); + if(neededFrequency > highestFreq) { highestFreq = neededFrequency; } - item = mixList.Next(item); } return highestFreq; } void AudioConferenceMixerImpl::UpdateToMix( - ListWrapper& mixList, - ListWrapper& rampOutList, + AudioFrameList* mixList, + AudioFrameList* rampOutList, std::map* mixParticipantList, - uint32_t& maxAudioFrameCounter) { + size_t& maxAudioFrameCounter) { WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id, "UpdateToMix(mixList,rampOutList,mixParticipantList,%d)", maxAudioFrameCounter); - const uint32_t mixListStartSize = mixList.GetSize(); - ListWrapper activeList; // Elements are AudioFrames + const size_t mixListStartSize = mixList->size(); + AudioFrameList activeList; // Struct needed by the passive lists to keep track of which AudioFrame // belongs to which MixerParticipant. - struct ParticipantFramePair - { - MixerParticipant* participant; - AudioFrame* audioFrame; - }; - ListWrapper passiveWasNotMixedList; // Elements are MixerParticipant - ListWrapper passiveWasMixedList; // Elements are MixerParticipant - ListItem* item = _participantList.First(); - while(item) - { + ParticipantFramePairList passiveWasNotMixedList; + ParticipantFramePairList passiveWasMixedList; + for (MixerParticipantList::iterator participant = _participantList.begin(); + participant != _participantList.end(); + ++participant) { // Stop keeping track of passive participants if there are already // enough participants available (they wont be mixed anyway). 
 
 void AudioConferenceMixerImpl::UpdateToMix(
-    ListWrapper& mixList,
-    ListWrapper& rampOutList,
+    AudioFrameList* mixList,
+    AudioFrameList* rampOutList,
     std::map<int, MixerParticipant*>* mixParticipantList,
-    uint32_t& maxAudioFrameCounter) {
+    size_t& maxAudioFrameCounter) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "UpdateToMix(mixList,rampOutList,mixParticipantList,%d)",
                  maxAudioFrameCounter);
 
-    const uint32_t mixListStartSize = mixList.GetSize();
-    ListWrapper activeList; // Elements are AudioFrames
+    const size_t mixListStartSize = mixList->size();
+    AudioFrameList activeList;
     // Struct needed by the passive lists to keep track of which AudioFrame
     // belongs to which MixerParticipant.
-    struct ParticipantFramePair
-    {
-        MixerParticipant* participant;
-        AudioFrame* audioFrame;
-    };
-    ListWrapper passiveWasNotMixedList; // Elements are MixerParticipant
-    ListWrapper passiveWasMixedList; // Elements are MixerParticipant
-    ListItem* item = _participantList.First();
-    while(item)
-    {
+    ParticipantFramePairList passiveWasNotMixedList;
+    ParticipantFramePairList passiveWasMixedList;
+    for (MixerParticipantList::iterator participant = _participantList.begin();
+         participant != _participantList.end();
+         ++participant) {
         // Stop keeping track of passive participants if there are already
         // enough participants available (they wont be mixed anyway).
         bool mustAddToPassiveList = (maxAudioFrameCounter >
-                                    (activeList.GetSize() +
-                                     passiveWasMixedList.GetSize() +
-                                     passiveWasNotMixedList.GetSize()));
+                                    (activeList.size() +
+                                     passiveWasMixedList.size() +
+                                     passiveWasNotMixedList.size()));
 
-        MixerParticipant* participant = static_cast<MixerParticipant*>(
-            item->GetItem());
         bool wasMixed = false;
-        participant->_mixHistory->WasMixed(wasMixed);
+        (*participant)->_mixHistory->WasMixed(wasMixed);
         AudioFrame* audioFrame = NULL;
-        if(_audioFramePool->PopMemory(audioFrame) == -1)
-        {
+        if(_audioFramePool->PopMemory(audioFrame) == -1) {
            WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
                         "failed PopMemory() call");
            assert(false);
@@ -753,56 +645,46 @@ void AudioConferenceMixerImpl::UpdateToMix(
        }
        audioFrame->sample_rate_hz_ = _outputFrequency;
 
-        if(participant->GetAudioFrame(_id,*audioFrame) != 0)
-        {
+        if((*participant)->GetAudioFrame(_id,*audioFrame) != 0) {
            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
                         "failed to GetAudioFrame() from participant");
            _audioFramePool->PushMemory(audioFrame);
-            item = _participantList.Next(item);
            continue;
        }
        // TODO(henrike): this assert triggers in some test cases where SRTP is
        // used which prevents NetEQ from making a VAD. Temporarily disable this
        // assert until the problem is fixed on a higher level.
        // assert(audioFrame->vad_activity_ != AudioFrame::kVadUnknown);
-        if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown)
-        {
+        if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown) {
            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
                         "invalid VAD state from participant");
        }
 
-        if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
-        {
-            if(!wasMixed)
-            {
+        if(audioFrame->vad_activity_ == AudioFrame::kVadActive) {
+            if(!wasMixed) {
                RampIn(*audioFrame);
            }
 
-            if(activeList.GetSize() >= maxAudioFrameCounter)
-            {
+            if(activeList.size() >= maxAudioFrameCounter) {
                // There are already more active participants than should be
                // mixed. Only keep the ones with the highest energy.
-                ListItem* replaceItem = NULL;
+                AudioFrameList::iterator replaceItem;
                 CalculateEnergy(*audioFrame);
                 uint32_t lowestEnergy = audioFrame->energy_;
 
-                ListItem* activeItem = activeList.First();
-                while(activeItem)
-                {
-                    AudioFrame* replaceFrame = static_cast<AudioFrame*>(
-                        activeItem->GetItem());
-                    CalculateEnergy(*replaceFrame);
-                    if(replaceFrame->energy_ < lowestEnergy)
-                    {
-                        replaceItem = activeItem;
-                        lowestEnergy = replaceFrame->energy_;
+                bool found_replace_item = false;
+                for (AudioFrameList::iterator iter = activeList.begin();
+                     iter != activeList.end();
+                     ++iter) {
+                    CalculateEnergy(**iter);
+                    if((*iter)->energy_ < lowestEnergy) {
+                        replaceItem = iter;
+                        lowestEnergy = (*iter)->energy_;
+                        found_replace_item = true;
                     }
-                    activeItem = activeList.Next(activeItem);
                 }
-                if(replaceItem != NULL)
-                {
-                    AudioFrame* replaceFrame = static_cast<AudioFrame*>(
-                        replaceItem->GetItem());
+                if(found_replace_item) {
+                    AudioFrame* replaceFrame = *replaceItem;
 
                     bool replaceWasMixed = false;
                     std::map<int, MixerParticipant*>::iterator it =
@@ -815,255 +697,219 @@ void AudioConferenceMixerImpl::UpdateToMix(
                     it->second->_mixHistory->WasMixed(replaceWasMixed);
 
                     mixParticipantList->erase(replaceFrame->id_);
-                    activeList.Erase(replaceItem);
+                    activeList.erase(replaceItem);
 
-                    activeList.PushFront(static_cast<void*>(audioFrame));
-                    (*mixParticipantList)[audioFrame->id_] = participant;
+                    activeList.push_front(audioFrame);
+                    (*mixParticipantList)[audioFrame->id_] = *participant;
                     assert(mixParticipantList->size() <=
                            kMaximumAmountOfMixedParticipants);
 
                     if (replaceWasMixed) {
                         RampOut(*replaceFrame);
-                        rampOutList.PushBack(static_cast<void*>(replaceFrame));
-                        assert(rampOutList.GetSize() <=
+                        rampOutList->push_back(replaceFrame);
+                        assert(rampOutList->size() <=
                                kMaximumAmountOfMixedParticipants);
                     } else {
                         _audioFramePool->PushMemory(replaceFrame);
                     }
                 } else {
-                    if(wasMixed)
-                    {
+                    if(wasMixed) {
                         RampOut(*audioFrame);
-                        rampOutList.PushBack(static_cast<void*>(audioFrame));
-                        assert(rampOutList.GetSize() <=
+                        rampOutList->push_back(audioFrame);
+                        assert(rampOutList->size() <=
                                kMaximumAmountOfMixedParticipants);
                     } else {
                         _audioFramePool->PushMemory(audioFrame);
                     }
                 }
             } else {
-                activeList.PushFront(static_cast<void*>(audioFrame));
-                (*mixParticipantList)[audioFrame->id_] = participant;
+                activeList.push_front(audioFrame);
+                (*mixParticipantList)[audioFrame->id_] = *participant;
                 assert(mixParticipantList->size() <=
                        kMaximumAmountOfMixedParticipants);
             }
         } else {
-            if(wasMixed)
-            {
+            if(wasMixed) {
                 ParticipantFramePair* pair = new ParticipantFramePair;
                 pair->audioFrame = audioFrame;
-                pair->participant = participant;
-                passiveWasMixedList.PushBack(static_cast<void*>(pair));
+                pair->participant = *participant;
+                passiveWasMixedList.push_back(pair);
             } else if(mustAddToPassiveList) {
                 RampIn(*audioFrame);
                 ParticipantFramePair* pair = new ParticipantFramePair;
                 pair->audioFrame = audioFrame;
-                pair->participant = participant;
-                passiveWasNotMixedList.PushBack(static_cast<void*>(pair));
+                pair->participant = *participant;
+                passiveWasNotMixedList.push_back(pair);
            } else {
                _audioFramePool->PushMemory(audioFrame);
            }
        }
-        item = _participantList.Next(item);
    }
-    assert(activeList.GetSize() <= maxAudioFrameCounter);
+    assert(activeList.size() <= maxAudioFrameCounter);
 
    // At this point it is known which participants should be mixed. Transfer
    // this information to this functions output parameters.
-    while(!activeList.Empty())
-    {
-        ListItem* mixItem = activeList.First();
-        mixList.PushBack(mixItem->GetItem());
-        activeList.Erase(mixItem);
+    for (AudioFrameList::iterator iter = activeList.begin();
+         iter != activeList.end();
+         ++iter) {
+        mixList->push_back(*iter);
     }
+    activeList.clear();
 
     // Always mix a constant number of AudioFrames. If there aren't enough
     // active participants mix passive ones. Starting with those that was mixed
     // last iteration.
-    while(!passiveWasMixedList.Empty())
-    {
-        ListItem* mixItem = passiveWasMixedList.First();
-        ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
-            mixItem->GetItem());
-        if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
-        {
-            mixList.PushBack(pair->audioFrame);
-            (*mixParticipantList)[pair->audioFrame->id_] =
-                pair->participant;
+    for (ParticipantFramePairList::iterator iter = passiveWasMixedList.begin();
+         iter != passiveWasMixedList.end();
+         ++iter) {
+        if(mixList->size() < maxAudioFrameCounter + mixListStartSize) {
+            mixList->push_back((*iter)->audioFrame);
+            (*mixParticipantList)[(*iter)->audioFrame->id_] =
+                (*iter)->participant;
             assert(mixParticipantList->size() <=
                    kMaximumAmountOfMixedParticipants);
+        } else {
+            _audioFramePool->PushMemory((*iter)->audioFrame);
         }
-        else
-        {
-            _audioFramePool->PushMemory(pair->audioFrame);
-        }
-        delete pair;
-        passiveWasMixedList.Erase(mixItem);
+        delete *iter;
     }
     // And finally the ones that have not been mixed for a while.
-    while(!passiveWasNotMixedList.Empty())
-    {
-        ListItem* mixItem = passiveWasNotMixedList.First();
-        ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
-            mixItem->GetItem());
-        if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
-        {
-            mixList.PushBack(pair->audioFrame);
-            (*mixParticipantList)[pair->audioFrame->id_] = pair->participant;
+    for (ParticipantFramePairList::iterator iter =
+             passiveWasNotMixedList.begin();
+         iter != passiveWasNotMixedList.end();
+         ++iter) {
+        if(mixList->size() < maxAudioFrameCounter + mixListStartSize) {
+            mixList->push_back((*iter)->audioFrame);
+            (*mixParticipantList)[(*iter)->audioFrame->id_] =
+                (*iter)->participant;
             assert(mixParticipantList->size() <=
                    kMaximumAmountOfMixedParticipants);
+        } else {
+            _audioFramePool->PushMemory((*iter)->audioFrame);
         }
-        else
-        {
-            _audioFramePool->PushMemory(pair->audioFrame);
-        }
-        delete pair;
-        passiveWasNotMixedList.Erase(mixItem);
+        delete *iter;
     }
-    assert(maxAudioFrameCounter + mixListStartSize >= mixList.GetSize());
-    maxAudioFrameCounter += mixListStartSize - mixList.GetSize();
+    assert(maxAudioFrameCounter + mixListStartSize >= mixList->size());
+    maxAudioFrameCounter += mixListStartSize - mixList->size();
 }
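Reviewer note (not part of the patch): the UpdateToMix() hunks above keep the active list capped by evicting the current lowest-energy frame when a louder one arrives. A simplified sketch of just that selection step, assuming a plain `energy` field instead of CalculateEnergy(), and in-place replacement instead of the real erase/push_front/ramp-out sequence:

// --- sketch_replace_lowest_energy.cc (illustrative only) ---
#include <iostream>
#include <list>

struct Frame { int id; unsigned energy; };
typedef std::list<Frame*> FrameList;

// Returns true if `candidate` displaced the quietest entry; false means the
// candidate itself is the quietest and should be dropped (or ramped out).
bool MaybeReplaceLowestEnergy(FrameList* active, Frame* candidate) {
  FrameList::iterator replace_item;
  bool found = false;              // the patch tracks this flag explicitly
  unsigned lowest = candidate->energy;
  for (FrameList::iterator it = active->begin(); it != active->end(); ++it) {
    if ((*it)->energy < lowest) {
      lowest = (*it)->energy;
      replace_item = it;
      found = true;
    }
  }
  if (!found)
    return false;
  *replace_item = candidate;       // real code also ramps out the loser
  return true;
}

int main() {
  Frame f1 = {1, 10}, f2 = {2, 50}, f3 = {3, 30};
  FrameList active;
  active.push_back(&f1);
  active.push_back(&f2);
  std::cout << MaybeReplaceLowestEnergy(&active, &f3) << "\n";  // 1: f1 evicted
}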
 
 void AudioConferenceMixerImpl::GetAdditionalAudio(
-    ListWrapper& additionalFramesList)
-{
+    AudioFrameList* additionalFramesList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "GetAdditionalAudio(additionalFramesList)");
-    ListItem* item = _additionalParticipantList.First();
-    while(item)
-    {
-        // The GetAudioFrame() callback may remove the current item. Store the
-        // next item just in case that happens.
-        ListItem* nextItem = _additionalParticipantList.Next(item);
+    // The GetAudioFrame() callback may result in the participant being removed
+    // from additionalParticipantList_. If that happens it will invalidate any
+    // iterators. Create a copy of the participants list such that the list of
+    // participants can be traversed safely.
+    MixerParticipantList additionalParticipantList;
+    additionalParticipantList.insert(additionalParticipantList.begin(),
+                                     _additionalParticipantList.begin(),
+                                     _additionalParticipantList.end());
 
-        MixerParticipant* participant = static_cast<MixerParticipant*>(
-            item->GetItem());
+    for (MixerParticipantList::iterator participant =
+             additionalParticipantList.begin();
+         participant != additionalParticipantList.end();
+         ++participant) {
         AudioFrame* audioFrame = NULL;
-        if(_audioFramePool->PopMemory(audioFrame) == -1)
-        {
+        if(_audioFramePool->PopMemory(audioFrame) == -1) {
            WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
                         "failed PopMemory() call");
            assert(false);
            return;
        }
        audioFrame->sample_rate_hz_ = _outputFrequency;
-        if(participant->GetAudioFrame(_id, *audioFrame) != 0)
-        {
+        if((*participant)->GetAudioFrame(_id, *audioFrame) != 0) {
            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
                         "failed to GetAudioFrame() from participant");
            _audioFramePool->PushMemory(audioFrame);
-            item = nextItem;
            continue;
        }
-        if(audioFrame->samples_per_channel_ == 0)
-        {
+        if(audioFrame->samples_per_channel_ == 0) {
            // Empty frame. Don't use it.
            _audioFramePool->PushMemory(audioFrame);
-            item = nextItem;
            continue;
        }
-        additionalFramesList.PushBack(static_cast<void*>(audioFrame));
-        item = nextItem;
+        additionalFramesList->push_back(audioFrame);
    }
 }
 
 void AudioConferenceMixerImpl::UpdateMixedStatus(
-    std::map<int, MixerParticipant*>& mixedParticipantsMap)
-{
+    std::map<int, MixerParticipant*>& mixedParticipantsMap) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "UpdateMixedStatus(mixedParticipantsMap)");
     assert(mixedParticipantsMap.size() <= kMaximumAmountOfMixedParticipants);
 
     // Loop through all participants. If they are in the mix map they
     // were mixed.
-    ListItem* participantItem = _participantList.First();
-    while(participantItem != NULL)
-    {
+    for (MixerParticipantList::iterator participant = _participantList.begin();
+         participant != _participantList.end();
+         ++participant) {
         bool isMixed = false;
-        MixerParticipant* participant =
-            static_cast<MixerParticipant*>(participantItem->GetItem());
-
         for (std::map<int, MixerParticipant*>::iterator it =
                  mixedParticipantsMap.begin();
              it != mixedParticipantsMap.end();
             ++it) {
-            if (it->second == participant) {
+            if (it->second == *participant) {
                isMixed = true;
                break;
            }
        }
-        participant->_mixHistory->SetIsMixed(isMixed);
-        participantItem = _participantList.Next(participantItem);
+        (*participant)->_mixHistory->SetIsMixed(isMixed);
    }
 }
 
-void AudioConferenceMixerImpl::ClearAudioFrameList(ListWrapper& audioFrameList)
-{
+void AudioConferenceMixerImpl::ClearAudioFrameList(
+    AudioFrameList* audioFrameList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "ClearAudioFrameList(audioFrameList)");
-    ListItem* item = audioFrameList.First();
-    while(item)
-    {
-        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
-        _audioFramePool->PushMemory(audioFrame);
-        audioFrameList.Erase(item);
-        item = audioFrameList.First();
+    for (AudioFrameList::iterator iter = audioFrameList->begin();
+         iter != audioFrameList->end();
+         ++iter) {
+        _audioFramePool->PushMemory(*iter);
     }
+    audioFrameList->clear();
 }
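Reviewer note (not part of the patch): GetAdditionalAudio() above now snapshots _additionalParticipantList before invoking GetAudioFrame(), because the callback can remove participants and invalidate live iterators. The same defense in a self-contained form, with the callback reduced to a bare removal:

// --- sketch_safe_traversal.cc (illustrative only) ---
#include <iostream>
#include <list>

typedef std::list<int> ParticipantList;

ParticipantList g_participants;

// Stand-in for GetAudioFrame() side effects: mutates the list being served.
void CallbackThatMayRemove(int id) {
  g_participants.remove(id);  // would invalidate a live iterator into it
}

int main() {
  g_participants.push_back(1);
  g_participants.push_back(2);

  // Iterate a snapshot, so removals triggered by the callback cannot
  // invalidate the iterator we are advancing.
  ParticipantList snapshot(g_participants.begin(), g_participants.end());
  for (ParticipantList::iterator it = snapshot.begin();
       it != snapshot.end(); ++it) {
    CallbackThatMayRemove(*it);
    std::cout << "processed " << *it << "\n";
  }
  std::cout << "remaining: " << g_participants.size() << "\n";  // 0
}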
 
 void AudioConferenceMixerImpl::UpdateVADPositiveParticipants(
-    ListWrapper& mixList)
-{
+    AudioFrameList* mixList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "UpdateVADPositiveParticipants(mixList)");
 
-    ListItem* item = mixList.First();
-    while(item != NULL)
-    {
-        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
-        CalculateEnergy(*audioFrame);
-        if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
-        {
+    for (AudioFrameList::iterator iter = mixList->begin();
+         iter != mixList->end();
+         ++iter) {
+        CalculateEnergy(**iter);
+        if((*iter)->vad_activity_ == AudioFrame::kVadActive) {
            _scratchVadPositiveParticipants[
                _scratchVadPositiveParticipantsAmount].participant =
-                audioFrame->id_;
+                (*iter)->id_;
            // TODO(andrew): to what should this be set?
            _scratchVadPositiveParticipants[
                _scratchVadPositiveParticipantsAmount].level = 0;
            _scratchVadPositiveParticipantsAmount++;
        }
-        item = mixList.Next(item);
    }
 }
 
 bool AudioConferenceMixerImpl::IsParticipantInList(
     MixerParticipant& participant,
-    ListWrapper& participantList)
-{
+    MixerParticipantList* participantList) const {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "IsParticipantInList(participant,participantList)");
-    ListItem* item = participantList.First();
-    while(item != NULL)
-    {
-        MixerParticipant* rhsParticipant =
-            static_cast<MixerParticipant*>(item->GetItem());
-        if(&participant == rhsParticipant)
-        {
+    for (MixerParticipantList::const_iterator iter = participantList->begin();
+         iter != participantList->end();
+         ++iter) {
+        if(&participant == *iter) {
            return true;
        }
-        item = participantList.Next(item);
    }
    return false;
 }
 
 bool AudioConferenceMixerImpl::AddParticipantToList(
     MixerParticipant& participant,
-    ListWrapper& participantList)
-{
+    MixerParticipantList* participantList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "AddParticipantToList(participant, participantList)");
-    if(participantList.PushBack(static_cast<void*>(&participant)) == -1)
-    {
-        return false;
-    }
+    participantList->push_back(&participant);
     // Make sure that the mixed status is correct for new MixerParticipant.
     participant._mixHistory->ResetMixedStatus();
     return true;
@@ -1071,52 +917,43 @@ bool AudioConferenceMixerImpl::AddParticipantToList(
 
 bool AudioConferenceMixerImpl::RemoveParticipantFromList(
     MixerParticipant& participant,
-    ListWrapper& participantList)
-{
+    MixerParticipantList* participantList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "RemoveParticipantFromList(participant, participantList)");
-    ListItem* item = participantList.First();
-    while(item)
-    {
-        if(item->GetItem() == &participant)
-        {
-            participantList.Erase(item);
+    for (MixerParticipantList::iterator iter = participantList->begin();
+         iter != participantList->end();
+         ++iter) {
+        if(*iter == &participant) {
+            participantList->erase(iter);
            // Participant is no longer mixed, reset to default.
            participant._mixHistory->ResetMixedStatus();
            return true;
        }
-        item = participantList.Next(item);
    }
    return false;
 }
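Reviewer note (not part of the patch): RemoveParticipantFromList() above relies on std::list::erase invalidating only the erased iterator, and it returns immediately after erasing, so the loop never touches a dead iterator. The find-and-erase-by-identity pattern, standalone:

// --- sketch_find_erase.cc (illustrative only) ---
#include <iostream>
#include <list>

typedef std::list<const void*> PointerList;

// Erase by pointer identity, as the mixer does for MixerParticipant*.
bool RemoveFromList(const void* element, PointerList* list) {
  for (PointerList::iterator it = list->begin(); it != list->end(); ++it) {
    if (*it == element) {
      list->erase(it);  // only `it` is invalidated, and we stop here anyway
      return true;
    }
  }
  return false;
}

int main() {
  int a = 0, b = 0;
  PointerList list;
  list.push_back(&a);
  list.push_back(&b);
  std::cout << RemoveFromList(&a, &list) << " " << list.size() << "\n";  // 1 1
  std::cout << RemoveFromList(&a, &list) << "\n";                        // 0
}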
 
 int32_t AudioConferenceMixerImpl::MixFromList(
     AudioFrame& mixedAudio,
-    const ListWrapper& audioFrameList)
-{
+    const AudioFrameList* audioFrameList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "MixFromList(mixedAudio, audioFrameList)");
-    uint32_t position = 0;
-    ListItem* item = audioFrameList.First();
-    if(item == NULL)
-    {
-        return 0;
-    }
+    if(audioFrameList->empty()) return 0;
 
-    if(_numMixedParticipants == 1)
-    {
+    uint32_t position = 0;
+    if(_numMixedParticipants == 1) {
         // No mixing required here; skip the saturation protection.
-        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        AudioFrame* audioFrame = audioFrameList->front();
         mixedAudio.CopyFrom(*audioFrame);
         SetParticipantStatistics(&_scratchMixedParticipants[position],
                                  *audioFrame);
         return 0;
     }
 
-    while(item != NULL)
-    {
-        if(position >= kMaximumAmountOfMixedParticipants)
-        {
+    for (AudioFrameList::const_iterator iter = audioFrameList->begin();
+         iter != audioFrameList->end();
+         ++iter) {
+        if(position >= kMaximumAmountOfMixedParticipants) {
            WEBRTC_TRACE(
                kTraceMemory,
                kTraceAudioMixerServer,
@@ -1127,14 +964,12 @@ int32_t AudioConferenceMixerImpl::MixFromList(
            assert(false);
            position = 0;
        }
-        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
-        MixFrames(&mixedAudio, audioFrame);
+        MixFrames(&mixedAudio, (*iter));
 
        SetParticipantStatistics(&_scratchMixedParticipants[position],
-                                 *audioFrame);
+                                 **iter);
 
        position++;
-        item = audioFrameList.Next(item);
    }
 
    return 0;
@@ -1143,35 +978,29 @@ int32_t AudioConferenceMixerImpl::MixFromList(
 
 // TODO(andrew): consolidate this function with MixFromList.
 int32_t AudioConferenceMixerImpl::MixAnonomouslyFromList(
     AudioFrame& mixedAudio,
-    const ListWrapper& audioFrameList)
-{
+    const AudioFrameList* audioFrameList) {
     WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
                  "MixAnonomouslyFromList(mixedAudio, audioFrameList)");
-    ListItem* item = audioFrameList.First();
-    if(item == NULL)
-        return 0;
 
-    if(_numMixedParticipants == 1)
-    {
+    if(audioFrameList->empty()) return 0;
+
+    if(_numMixedParticipants == 1) {
         // No mixing required here; skip the saturation protection.
-        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        AudioFrame* audioFrame = audioFrameList->front();
         mixedAudio.CopyFrom(*audioFrame);
         return 0;
     }
 
-    while(item != NULL)
-    {
-        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
-        MixFrames(&mixedAudio, audioFrame);
-        item = audioFrameList.Next(item);
+    for (AudioFrameList::const_iterator iter = audioFrameList->begin();
+         iter != audioFrameList->end();
+         ++iter) {
+        MixFrames(&mixedAudio, *iter);
     }
 
     return 0;
 }
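Reviewer note (not part of the patch): after this change both MixFromList() variants share the same shape: early return on an empty list, a verbatim-copy fast path when only one participant is mixed (no saturation protection needed), otherwise an accumulate loop. Sketched here over plain int samples rather than AudioFrame/MixFrames():

// --- sketch_mix_fast_path.cc (illustrative only) ---
#include <iostream>
#include <list>
#include <vector>

typedef std::vector<int> Frame;
typedef std::list<const Frame*> FrameList;

int MixFromList(Frame* mixed, const FrameList* frames) {
  if (frames->empty()) return 0;
  if (frames->size() == 1) {     // stands in for _numMixedParticipants == 1
    *mixed = *frames->front();   // straight copy, no limiting required
    return 0;
  }
  mixed->assign(frames->front()->size(), 0);
  for (FrameList::const_iterator it = frames->begin(); it != frames->end();
       ++it) {
    for (size_t i = 0; i < mixed->size(); ++i)
      (*mixed)[i] += (**it)[i];  // real code saturates/limits the sum
  }
  return 0;
}

int main() {
  Frame a(2, 100), b(2, 50), mixed;
  FrameList frames;
  frames.push_back(&a);
  frames.push_back(&b);
  MixFromList(&mixed, &frames);
  std::cout << mixed[0] << "\n";  // prints 150
}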
 
-bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
-{
-    if(_numMixedParticipants == 1)
-    {
+bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio) {
+    if(_numMixedParticipants == 1) {
         return true;
     }
 
@@ -1190,8 +1019,7 @@ bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
     // negative value is undefined).
     mixedAudio += mixedAudio;
 
-    if(error != _limiter->kNoError)
-    {
+    if(error != _limiter->kNoError) {
         WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
                      "Error from AudioProcessing: %d", error);
         assert(false);
diff --git a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index 737acbb0ba4e..31dc71e5dce7 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -11,6 +11,7 @@
 #ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
 #define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
 
+#include <list>
 #include <map>
 
 #include "webrtc/engine_configurations.h"
@@ -19,13 +20,15 @@
 #include "webrtc/modules/audio_conference_mixer/source/memory_pool.h"
 #include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
 #include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 
 namespace webrtc {
 class AudioProcessing;
 class CriticalSectionWrapper;
 
+typedef std::list<AudioFrame*> AudioFrameList;
+typedef std::list<MixerParticipant*> MixerParticipantList;
+
 // Cheshire cat implementation of MixerParticipant's non virtual functions.
 class MixHistory
 {
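Reviewer note (not part of the patch): the typedefs added above are what let every static_cast to and from void* disappear in the .cc hunks. A toy comparison of the untyped-ListWrapper style against the typed std::list style (VoidList is a stand-in for the removed wrapper, not a WebRTC type):

// --- sketch_typed_lists.cc (illustrative only) ---
#include <iostream>
#include <list>

struct AudioFrame { int id; };
typedef std::list<AudioFrame*> AudioFrameList;  // as added to the header
typedef std::list<void*> VoidList;              // ListWrapper stored void*

int main() {
  AudioFrame frame = {7};

  // Untyped storage needs a cast on every access; a wrong cast still
  // compiles and only fails at run time.
  VoidList untyped;
  untyped.push_back(static_cast<void*>(&frame));
  AudioFrame* out = static_cast<AudioFrame*>(untyped.front());

  // Typed storage needs no casts, and pushing the wrong pointer type is a
  // compile error -- the point of the new typedefs.
  AudioFrameList typed;
  typed.push_back(&frame);
  std::cout << out->id << " " << typed.front()->id << "\n";  // 7 7
}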
@@ -74,7 +77,7 @@ public:
         const uint32_t amountOf10MsBetweenCallbacks);
     virtual int32_t UnRegisterMixerStatusCallback();
     virtual int32_t SetMixabilityStatus(MixerParticipant& participant,
-                                        const bool mixable);
+                                        bool mixable);
     virtual int32_t MixabilityStatus(MixerParticipant& participant,
                                      bool& mixable);
     virtual int32_t SetMinimumMixingFrequency(Frequency freq);
@@ -89,10 +92,6 @@ private:
     int32_t SetOutputFrequency(const Frequency frequency);
     Frequency OutputFrequency() const;
 
-    // Must be called whenever an audio frame indicates the number of channels
-    // has changed.
-    bool SetNumLimiterChannels(int numChannels);
-
     // Fills mixList with the AudioFrames pointers that should be used when
     // mixing. Fills mixParticipantList with ParticipantStatistics for the
     // participants who's AudioFrames are inside mixList.
@@ -102,18 +101,18 @@ private:
     // used to be mixed but shouldn't be mixed any longer. These AudioFrames
     // should be ramped out over this AudioFrame to avoid audio discontinuities.
     void UpdateToMix(
-        ListWrapper& mixList,
-        ListWrapper& rampOutList,
+        AudioFrameList* mixList,
+        AudioFrameList* rampOutList,
         std::map<int, MixerParticipant*>* mixParticipantList,
-        uint32_t& maxAudioFrameCounter);
+        size_t& maxAudioFrameCounter);
 
     // Return the lowest mixing frequency that can be used without having to
     // downsample any audio.
     int32_t GetLowestMixingFrequency();
-    int32_t GetLowestMixingFrequencyFromList(ListWrapper& mixList);
+    int32_t GetLowestMixingFrequencyFromList(MixerParticipantList* mixList);
 
     // Return the AudioFrames that should be mixed anonymously.
-    void GetAdditionalAudio(ListWrapper& additionalFramesList);
+    void GetAdditionalAudio(AudioFrameList* additionalFramesList);
 
     // Update the MixHistory of all MixerParticipants. mixedParticipantsList
     // should contain a map of MixerParticipants that have been mixed.
@@ -121,44 +120,44 @@ private:
         std::map<int, MixerParticipant*>& mixedParticipantsList);
 
     // Clears audioFrameList and reclaims all memory associated with it.
-    void ClearAudioFrameList(ListWrapper& audioFrameList);
+    void ClearAudioFrameList(AudioFrameList* audioFrameList);
 
     // Update the list of MixerParticipants who have a positive VAD. mixList
     // should be a list of AudioFrames
     void UpdateVADPositiveParticipants(
-        ListWrapper& mixList);
+        AudioFrameList* mixList);
 
     // This function returns true if it finds the MixerParticipant in the
     // specified list of MixerParticipants.
     bool IsParticipantInList(
         MixerParticipant& participant,
-        ListWrapper& participantList);
+        MixerParticipantList* participantList) const;
 
     // Add/remove the MixerParticipant to the specified
     // MixerParticipant list.
     bool AddParticipantToList(
         MixerParticipant& participant,
-        ListWrapper& participantList);
+        MixerParticipantList* participantList);
     bool RemoveParticipantFromList(
         MixerParticipant& removeParticipant,
-        ListWrapper& participantList);
+        MixerParticipantList* participantList);
 
     // Mix the AudioFrames stored in audioFrameList into mixedAudio.
     int32_t MixFromList(
         AudioFrame& mixedAudio,
-        const ListWrapper& audioFrameList);
+        const AudioFrameList* audioFrameList);
     // Mix the AudioFrames stored in audioFrameList into mixedAudio. No
     // record will be kept of this mix (e.g. the corresponding MixerParticipants
     // will not be marked as IsMixed()
     int32_t MixAnonomouslyFromList(AudioFrame& mixedAudio,
-                                   const ListWrapper& audioFrameList);
+                                   const AudioFrameList* audioFrameList);
 
     bool LimitMixedAudio(AudioFrame& mixedAudio);
 
     // Scratch memory
     // Note that the scratch memory may only be touched in the scope of
     // Process().
-    uint32_t _scratchParticipantsToMixAmount;
+    size_t _scratchParticipantsToMixAmount;
     ParticipantStatistics _scratchMixedParticipants[
         kMaximumAmountOfMixedParticipants];
     uint32_t _scratchVadPositiveParticipantsAmount;
@@ -176,9 +175,9 @@ private:
     AudioMixerOutputReceiver* _mixReceiver;
 
     AudioMixerStatusReceiver* _mixerStatusCallback;
-    uint32_t          _amountOf10MsBetweenCallbacks;
-    uint32_t          _amountOf10MsUntilNextCallback;
-    bool              _mixerStatusCb;
+    uint32_t _amountOf10MsBetweenCallbacks;
+    uint32_t _amountOf10MsUntilNextCallback;
+    bool _mixerStatusCb;
 
     // The current sample frequency and sample size when mixing.
     Frequency _outputFrequency;
@@ -188,10 +187,11 @@ private:
     MemoryPool<AudioFrame>* _audioFramePool;
 
     // List of all participants. Note all lists are disjunct
-    ListWrapper _participantList;              // May be mixed.
-    ListWrapper _additionalParticipantList;    // Always mixed, anonomously.
+    MixerParticipantList _participantList;     // May be mixed.
+    // Always mixed, anonomously.
+    MixerParticipantList _additionalParticipantList;
 
-    uint32_t _numMixedParticipants;
+    size_t _numMixedParticipants;
 
     uint32_t _timeStamp;
diff --git a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
index 6d4dccf8ae3d..04e7cd52254f 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
@@ -12,9 +12,9 @@
 #define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
 
 #include <assert.h>
+#include <list>
 
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
@@ -40,7 +40,7 @@ private:
 
     bool _terminate;
 
-    ListWrapper _memoryPool;
+    std::list<MemoryType*> _memoryPool;
 
     uint32_t _initialPoolSize;
     uint32_t _createdMemory;
@@ -51,7 +51,6 @@ template<class MemoryType>
 MemoryPoolImpl<MemoryType>::MemoryPoolImpl(int32_t initialPoolSize)
     : _crit(CriticalSectionWrapper::CreateCriticalSection()),
       _terminate(false),
-      _memoryPool(),
       _initialPoolSize(initialPoolSize),
       _createdMemory(0),
       _outstandingMemory(0)
@@ -76,20 +75,17 @@ int32_t MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
         memory = NULL;
         return -1;
     }
-    ListItem* item = _memoryPool.First();
-    if(item == NULL)
-    {
+    if (_memoryPool.empty()) {
         // _memoryPool empty create new memory.
         CreateMemory(_initialPoolSize);
-        item = _memoryPool.First();
-        if(item == NULL)
+        if(_memoryPool.empty())
         {
             memory = NULL;
             return -1;
         }
     }
-    memory = static_cast<MemoryType*>(item->GetItem());
-    _memoryPool.Erase(item);
+    memory = _memoryPool.front();
+    _memoryPool.pop_front();
     _outstandingMemory++;
     return 0;
 }
@@ -103,7 +99,7 @@ int32_t MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
     }
     CriticalSectionScoped cs(_crit);
     _outstandingMemory--;
-    if(_memoryPool.GetSize() > (_initialPoolSize << 1))
+    if(_memoryPool.size() > (_initialPoolSize << 1))
     {
         // Reclaim memory if less than half of the pool is unused.
         _createdMemory--;
@@ -111,7 +107,7 @@ int32_t MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
         memory = NULL;
         return 0;
     }
-    _memoryPool.PushBack(static_cast<void*>(memory));
+    _memoryPool.push_back(memory);
     memory = NULL;
     return 0;
 }
@@ -127,21 +123,15 @@ template<class MemoryType>
 int32_t MemoryPoolImpl<MemoryType>::Terminate()
 {
     CriticalSectionScoped cs(_crit);
-    assert(_createdMemory == _outstandingMemory + _memoryPool.GetSize());
+    assert(_createdMemory == _outstandingMemory + _memoryPool.size());
 
     _terminate = true;
     // Reclaim all memory.
     while(_createdMemory > 0)
     {
-        ListItem* item = _memoryPool.First();
-        if(item == NULL)
-        {
-            // There is memory that hasn't been returned yet.
-            return -1;
-        }
-        MemoryType* memory = static_cast<MemoryType*>(item->GetItem());
+        MemoryType* memory = _memoryPool.front();
+        _memoryPool.pop_front();
         delete memory;
-        _memoryPool.Erase(item);
         _createdMemory--;
     }
     return 0;
@@ -158,7 +148,7 @@ int32_t MemoryPoolImpl<MemoryType>::CreateMemory(
     {
         return -1;
     }
-        _memoryPool.PushBack(static_cast<void*>(memory));
+        _memoryPool.push_back(memory);
         _createdMemory++;
     }
     return 0;
diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_common.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_common.h
new file mode 100644
index 000000000000..f873b04ac9f9
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_common.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_COMMON_H_ +#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_COMMON_H_ + +namespace webrtc { + +enum { + kDefaultSampleRate = 44100, + kNumChannels = 1, + kDefaultBufSizeInSamples = kDefaultSampleRate * 10 / 1000, +}; + +class PlayoutDelayProvider { + public: + virtual int PlayoutDelayMs() = 0; + + protected: + PlayoutDelayProvider() {} + virtual ~PlayoutDelayProvider() {} +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_COMMON_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc deleted file mode 100644 index 7585f4e004e3..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc +++ /dev/null @@ -1,2851 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * Android audio device implementation (JNI/AudioTrack/AudioRecord usage) - */ - -// TODO(xians): Break out attach and detach current thread to JVM to -// separate functions. - -#include -#include - -#include "webrtc/modules/audio_device/android/audio_device_jni_android.h" -#include "webrtc/modules/audio_device/audio_device_config.h" -#include "webrtc/modules/audio_device/audio_device_utility.h" - -#include "webrtc/system_wrappers/interface/event_wrapper.h" -#include "webrtc/system_wrappers/interface/thread_wrapper.h" -#include "webrtc/system_wrappers/interface/trace.h" - -#include "AndroidJNIWrapper.h" - -namespace webrtc -{ -// TODO(leozwang): Refactor jni and the following global variables, a -// good example is jni_helper in Chromium. -JavaVM* AudioDeviceAndroidJni::globalJvm = NULL; -jobject AudioDeviceAndroidJni::globalContext = NULL; -jclass AudioDeviceAndroidJni::globalScClass = NULL; - -// ---------------------------------------------------------------------------- -// SetAndroidAudioDeviceObjects -// -// Global function for setting Java pointers and creating Java -// objects that are global to all instances of VoiceEngine used -// by the same Java application. -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects( - void* javaVM, - void* context) { - return SetAndroidAudioDeviceObjects(javaVM, NULL, context); -} - -int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects( - void* javaVM, - void* null_env, - void* context) { - WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, -1, - "%s called", __FUNCTION__); - - // TODO(leozwang): Make this function thread-safe. 
-  globalJvm = reinterpret_cast<JavaVM*>(javaVM);
-
-  JNIEnv* env = NULL;
-
-  // Check if we already got a reference
-  if (globalJvm && !globalScClass) {
-    if (globalJvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1,
-                   "%s: could not get Java environment", __FUNCTION__);
-      return -1;
-    }
-    globalJvm->AttachCurrentThread(&env, NULL);
-
-    // Get java class type (note path to class packet).
-    globalScClass = jsjni_GetGlobalClassRef(AudioCaptureClass);
-    if (!globalScClass) {
-      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-                   "%s: could not find java class", __FUNCTION__);
-      return -1; // exception thrown
-    }
-
-    globalContext = env->NewGlobalRef(
-        reinterpret_cast<jobject>(context));
-    if (!globalContext) {
-      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-                   "%s: could not create context reference", __FUNCTION__);
-      return -1;
-    }
-  }
-  else { // User is resetting the env variable
-    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
-                 "%s: env is NULL, assuming deinit", __FUNCTION__);
-
-    if (!env) {
-      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
-                   "%s: saved env already NULL", __FUNCTION__);
-      return 0;
-    }
-
-    env->DeleteGlobalRef(globalScClass);
-    globalScClass = reinterpret_cast<jclass>(NULL);
-
-    env->DeleteGlobalRef(globalContext);
-    globalContext = reinterpret_cast<jobject>(NULL);
-  }
-
-  return 0;
-}
-
-// ============================================================================
-//                            Construction & Destruction
-// ============================================================================
-
-// ----------------------------------------------------------------------------
-//  AudioDeviceAndroidJni - ctor
-// ----------------------------------------------------------------------------
-
-AudioDeviceAndroidJni::AudioDeviceAndroidJni(const int32_t id) :
-    _ptrAudioBuffer(NULL),
-    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
-    _id(id),
-    _timeEventRec(*EventWrapper::Create()),
-    _timeEventPlay(*EventWrapper::Create()),
-    _recStartStopEvent(*EventWrapper::Create()),
-    _playStartStopEvent(*EventWrapper::Create()),
-    _ptrThreadPlay(NULL),
-    _ptrThreadRec(NULL),
-    _recThreadID(0),
-    _playThreadID(0),
-    _playThreadIsInitialized(false),
-    _recThreadIsInitialized(false),
-    _shutdownPlayThread(false),
-    _shutdownRecThread(false),
-    // _recBuffer[2*REC_BUF_SIZE_IN_SAMPLES]
-    _recordingDeviceIsSpecified(false),
-    _playoutDeviceIsSpecified(false), _initialized(false),
-    _recording(false), _playing(false), _recIsInitialized(false),
-    _playIsInitialized(false), _micIsInitialized(false),
-    _speakerIsInitialized(false), _startRec(false),
-    _startPlay(false), _playWarning(0),
-    _playError(0), _recWarning(0), _recError(0), _delayPlayout(0),
-    _delayRecording(0),
-    _AGC(false),
-    _samplingFreqIn((N_REC_SAMPLES_PER_SEC)),
-    _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC)),
-    _maxSpeakerVolume(0),
-    _loudSpeakerOn(false),
-    _recAudioSource(1), // 1 is AudioSource.MIC which is our default
-    _javaVM(NULL), _jniEnvPlay(NULL),
-    _jniEnvRec(NULL), _javaScClass(0), _javaScObj(0),
-    _javaPlayBuffer(0), _javaRecBuffer(0), _javaDirectPlayBuffer(NULL),
-    _javaDirectRecBuffer(NULL), _javaMidPlayAudio(0),
-    _javaMidRecAudio(0)
-{
-    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
-                 "%s created", __FUNCTION__);
-
-    memset(_recBuffer, 0, sizeof(_recBuffer));
-}
-
-// ----------------------------------------------------------------------------
-//  AudioDeviceAndroidJni - dtor
-// ----------------------------------------------------------------------------
-
-AudioDeviceAndroidJni::~AudioDeviceAndroidJni() -{ - WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, - "%s destroyed", __FUNCTION__); - - Terminate(); - - delete &_recStartStopEvent; - delete &_playStartStopEvent; - delete &_timeEventRec; - delete &_timeEventPlay; - delete &_critSect; -} - -// ============================================================================ -// API -// ============================================================================ - -// ---------------------------------------------------------------------------- -// AttachAudioBuffer -// ---------------------------------------------------------------------------- - -void AudioDeviceAndroidJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) -{ - - CriticalSectionScoped lock(&_critSect); - - _ptrAudioBuffer = audioBuffer; - - // inform the AudioBuffer about default settings for this implementation - _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC); - _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC); - _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS); - _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS); -} - -// ---------------------------------------------------------------------------- -// ActiveAudioLayer -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const -{ - - audioLayer = AudioDeviceModule::kPlatformDefaultAudio; - - return 0; -} - -// ---------------------------------------------------------------------------- -// Init -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::Init() -{ - - CriticalSectionScoped lock(&_critSect); - - if (_initialized) - { - return 0; - } - - _playWarning = 0; - _playError = 0; - _recWarning = 0; - _recError = 0; - - // Init Java member variables - // and set up JNI interface to - // AudioDeviceAndroid java class - if (InitJavaResources() != 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: Failed to init Java resources", __FUNCTION__); - return -1; - } - - // Check the sample rate to be used for playback and recording - // and the max playout volume - if (InitSampleRate() != 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: Failed to init samplerate", __FUNCTION__); - return -1; - } - - // RECORDING - const char* threadName = "jni_audio_capture_thread"; - _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this, - kRealtimePriority, threadName); - if (_ptrThreadRec == NULL) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, - " failed to create the rec audio thread"); - return -1; - } - - unsigned int threadID(0); - if (!_ptrThreadRec->Start(threadID)) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, - " failed to start the rec audio thread"); - delete _ptrThreadRec; - _ptrThreadRec = NULL; - return -1; - } - _recThreadID = threadID; - - // PLAYOUT - threadName = "jni_audio_render_thread"; - _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this, - kRealtimePriority, threadName); - if (_ptrThreadPlay == NULL) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, - " failed to create the play audio thread"); - return -1; - } - - threadID = 0; - if (!_ptrThreadPlay->Start(threadID)) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, - " failed to start the play audio thread"); - delete _ptrThreadPlay; - _ptrThreadPlay = NULL; - return -1; - } - _playThreadID = threadID; - 
- _initialized = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// Terminate -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::Terminate() -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_initialized) - { - return 0; - } - - // RECORDING - StopRecording(); - _shutdownRecThread = true; - _timeEventRec.Set(); // Release rec thread from waiting state - if (_ptrThreadRec) - { - // First, the thread must detach itself from Java VM - _critSect.Leave(); - if (kEventSignaled != _recStartStopEvent.Wait(5000)) - { - WEBRTC_TRACE( - kTraceError, - kTraceAudioDevice, - _id, - "%s: Recording thread shutdown timed out, cannot " - "terminate thread", - __FUNCTION__); - // If we close thread anyway, the app will crash - return -1; - } - _recStartStopEvent.Reset(); - _critSect.Enter(); - - // Close down rec thread - ThreadWrapper* tmpThread = _ptrThreadRec; - _ptrThreadRec = NULL; - _critSect.Leave(); - tmpThread->SetNotAlive(); - // Release again, we might have returned to waiting state - _timeEventRec.Set(); - if (tmpThread->Stop()) - { - delete tmpThread; - _jniEnvRec = NULL; - } - else - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " failed to close down the rec audio thread"); - } - _critSect.Enter(); - - _recThreadIsInitialized = false; - } - _micIsInitialized = false; - _recordingDeviceIsSpecified = false; - - // PLAYOUT - StopPlayout(); - _shutdownPlayThread = true; - _timeEventPlay.Set(); // Release rec thread from waiting state - if (_ptrThreadPlay) - { - // First, the thread must detach itself from Java VM - _critSect.Leave(); - if (kEventSignaled != _playStartStopEvent.Wait(5000)) - { - WEBRTC_TRACE( - kTraceError, - kTraceAudioDevice, - _id, - "%s: Playout thread shutdown timed out, cannot " - "terminate thread", - __FUNCTION__); - // If we close thread anyway, the app will crash - return -1; - } - _playStartStopEvent.Reset(); - _critSect.Enter(); - - // Close down play thread - ThreadWrapper* tmpThread = _ptrThreadPlay; - _ptrThreadPlay = NULL; - _critSect.Leave(); - tmpThread->SetNotAlive(); - _timeEventPlay.Set(); - if (tmpThread->Stop()) - { - delete tmpThread; - _jniEnvPlay = NULL; - } - else - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " failed to close down the play audio thread"); - } - _critSect.Enter(); - - _playThreadIsInitialized = false; - } - _speakerIsInitialized = false; - _playoutDeviceIsSpecified = false; - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - // Make method IDs and buffer pointers unusable - _javaMidPlayAudio = 0; - _javaMidRecAudio = 0; - _javaDirectPlayBuffer = NULL; - _javaDirectRecBuffer = NULL; - - // Delete the references to the java buffers, this allows the - // garbage collector to delete them - env->DeleteGlobalRef(_javaPlayBuffer); - _javaPlayBuffer = 0; - env->DeleteGlobalRef(_javaRecBuffer); - _javaRecBuffer = 0; - - // Delete the references to the java object and class, this allows the - // garbage collector to delete them - 
env->DeleteGlobalRef(_javaScObj); - _javaScObj = 0; - _javaScClass = 0; - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - - _initialized = false; - - return 0; -} - -// ---------------------------------------------------------------------------- -// Initialized -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::Initialized() const -{ - - return (_initialized); -} - -// ---------------------------------------------------------------------------- -// SpeakerIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SpeakerIsAvailable(bool& available) -{ - - // We always assume it's available - available = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// InitSpeaker -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::InitSpeaker() -{ - - CriticalSectionScoped lock(&_critSect); - - if (_playing) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Playout already started"); - return -1; - } - - if (!_playoutDeviceIsSpecified) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Playout device is not specified"); - return -1; - } - - // Nothing needs to be done here, we use a flag to have consistent - // behavior with other platforms - _speakerIsInitialized = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// MicrophoneIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneIsAvailable(bool& available) -{ - - // We always assume it's available - available = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// InitMicrophone -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::InitMicrophone() -{ - - CriticalSectionScoped lock(&_critSect); - - if (_recording) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Recording already started"); - return -1; - } - - if (!_recordingDeviceIsSpecified) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Recording device is not specified"); - return -1; - } - - // Nothing needs to be done here, we use a flag to have consistent - // behavior with other platforms - _micIsInitialized = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// SpeakerIsInitialized -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::SpeakerIsInitialized() const -{ - - return _speakerIsInitialized; -} - -// ---------------------------------------------------------------------------- -// MicrophoneIsInitialized -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::MicrophoneIsInitialized() const -{ - - return _micIsInitialized; -} - -// ---------------------------------------------------------------------------- -// SpeakerVolumeIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SpeakerVolumeIsAvailable(bool& available) -{ 
-
-    available = true; // We assume we are always be able to set/get volume
-
-    return 0;
-}
-
-// ----------------------------------------------------------------------------
-//  SetSpeakerVolume
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceAndroidJni::SetSpeakerVolume(uint32_t volume)
-{
-
-    if (!_speakerIsInitialized)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     "  Speaker not initialized");
-        return -1;
-    }
-    if (!globalContext)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     "  Context is not set");
-        return -1;
-    }
-
-    // get the JNI env for this thread
-    JNIEnv *env;
-    bool isAttached = false;
-
-    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
-    {
-        // try to attach the thread and get the env
-        // Attach this thread to JVM
-        jint res = _javaVM->AttachCurrentThread(&env, NULL);
-        if ((res < 0) || !env)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                         "  Could not attach thread to JVM (%d, %p)", res, env);
-            return -1;
-        }
-        isAttached = true;
-    }
-
-    // get the method ID
-    jmethodID setPlayoutVolumeID = env->GetMethodID(_javaScClass,
-                                                    "SetPlayoutVolume", "(I)I");
-
-    // call java sc object method
-    jint res = env->CallIntMethod(_javaScObj, setPlayoutVolumeID,
-                                  static_cast<int> (volume));
-    if (res < 0)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     "SetPlayoutVolume failed (%d)", res);
-        return -1;
-    }
-
-    // Detach this thread if it was attached
-    if (isAttached)
-    {
-        if (_javaVM->DetachCurrentThread() < 0)
-        {
-            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
-                         "  Could not detach thread from JVM");
-        }
-    }
-
-    return 0;
-}
-
-// ----------------------------------------------------------------------------
-//  SpeakerVolume
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceAndroidJni::SpeakerVolume(uint32_t& volume) const
-{
-
-    if (!_speakerIsInitialized)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     "  Speaker not initialized");
-        return -1;
-    }
-    if (!globalContext)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     "  Context is not set");
-        return -1;
-    }
-
-    // get the JNI env for this thread
-    JNIEnv *env;
-    bool isAttached = false;
-
-    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
-    {
-        // try to attach the thread and get the env
-        // Attach this thread to JVM
-        jint res = _javaVM->AttachCurrentThread(&env, NULL);
-        if ((res < 0) || !env)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                         "  Could not attach thread to JVM (%d, %p)", res, env);
-            return -1;
-        }
-        isAttached = true;
-    }
-
-    // get the method ID
-    jmethodID getPlayoutVolumeID = env->GetMethodID(_javaScClass,
-                                                    "GetPlayoutVolume", "()I");
-
-    // call java sc object method
-    jint level = env->CallIntMethod(_javaScObj, getPlayoutVolumeID);
-    if (level < 0)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     "GetPlayoutVolume failed (%d)", level);
-        return -1;
-    }
-
-    // Detach this thread if it was attached
-    if (isAttached)
-    {
-        if (_javaVM->DetachCurrentThread() < 0)
-        {
-            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
-                         "  Could not detach thread from JVM");
-        }
-    }
-
-    volume = static_cast<uint32_t> (level);
-
-    return 0;
-}
-
-// ----------------------------------------------------------------------------
-//  SetWaveOutVolume
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceAndroidJni::SetWaveOutVolume(
-    uint16_t /*volumeLeft*/,
-    uint16_t
/*volumeRight*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// WaveOutVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::WaveOutVolume( - uint16_t& /*volumeLeft*/, - uint16_t& /*volumeRight*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MaxSpeakerVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MaxSpeakerVolume( - uint32_t& maxVolume) const -{ - - if (!_speakerIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Speaker not initialized"); - return -1; - } - - maxVolume = _maxSpeakerVolume; - - return 0; -} - -// ---------------------------------------------------------------------------- -// MinSpeakerVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MinSpeakerVolume( - uint32_t& minVolume) const -{ - - if (!_speakerIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Speaker not initialized"); - return -1; - } - - minVolume = 0; - - return 0; -} - -// ---------------------------------------------------------------------------- -// SpeakerVolumeStepSize -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SpeakerVolumeStepSize( - uint16_t& stepSize) const -{ - - if (!_speakerIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Speaker not initialized"); - return -1; - } - - stepSize = 1; - - return 0; -} - -// ---------------------------------------------------------------------------- -// SpeakerMuteIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SpeakerMuteIsAvailable(bool& available) -{ - - available = false; // Speaker mute not supported on Android - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetSpeakerMute -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetSpeakerMute(bool /*enable*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// SpeakerMute -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SpeakerMute(bool& /*enabled*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MicrophoneMuteIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneMuteIsAvailable(bool& available) -{ - - available = false; // Mic mute not supported on Android - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetMicrophoneMute -// ---------------------------------------------------------------------------- - -int32_t 
AudioDeviceAndroidJni::SetMicrophoneMute(bool /*enable*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MicrophoneMute -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneMute(bool& /*enabled*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MicrophoneBoostIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneBoostIsAvailable(bool& available) -{ - - available = false; // Mic boost not supported on Android - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetMicrophoneBoost -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetMicrophoneBoost(bool enable) -{ - - if (!_micIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Microphone not initialized"); - return -1; - } - - if (enable) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Enabling not available"); - return -1; - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// MicrophoneBoost -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneBoost(bool& enabled) const -{ - - if (!_micIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Microphone not initialized"); - return -1; - } - - enabled = false; - - return 0; -} - -// ---------------------------------------------------------------------------- -// StereoRecordingIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StereoRecordingIsAvailable(bool& available) -{ - - available = false; // Stereo recording not supported on Android - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetStereoRecording -// -// Specifies the number of input channels. 
-// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetStereoRecording(bool enable) -{ - - if (enable) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Enabling not available"); - return -1; - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// StereoRecording -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StereoRecording(bool& enabled) const -{ - - enabled = false; - - return 0; -} - -// ---------------------------------------------------------------------------- -// StereoPlayoutIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StereoPlayoutIsAvailable(bool& available) -{ - - available = false; // Stereo playout not supported on Android - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetStereoPlayout -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetStereoPlayout(bool enable) -{ - - if (enable) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Enabling not available"); - return -1; - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// StereoPlayout -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StereoPlayout(bool& enabled) const -{ - - enabled = false; - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetAGC -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetAGC(bool enable) -{ - - _AGC = enable; - - return 0; -} - -// ---------------------------------------------------------------------------- -// AGC -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::AGC() const -{ - - return _AGC; -} - -// ---------------------------------------------------------------------------- -// MicrophoneVolumeIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneVolumeIsAvailable( - bool& available) -{ - - available = false; // Mic volume not supported on Android - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetMicrophoneVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetMicrophoneVolume( - uint32_t /*volume*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MicrophoneVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneVolume( - uint32_t& /*volume*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MaxMicrophoneVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MaxMicrophoneVolume( - uint32_t& /*maxVolume*/) const -{ - - 
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MinMicrophoneVolume -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MinMicrophoneVolume( - uint32_t& /*minVolume*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// MicrophoneVolumeStepSize -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::MicrophoneVolumeStepSize( - uint16_t& /*stepSize*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// PlayoutDevices -// ---------------------------------------------------------------------------- - -int16_t AudioDeviceAndroidJni::PlayoutDevices() -{ - - // There is one device only - return 1; -} - -// ---------------------------------------------------------------------------- -// SetPlayoutDevice I (II) -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetPlayoutDevice(uint16_t index) -{ - - if (_playIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Playout already initialized"); - return -1; - } - - if (0 != index) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Device index is out of range [0,0]"); - return -1; - } - - // Do nothing but set a flag, this is to have consistent behavior - // with other platforms - _playoutDeviceIsSpecified = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetPlayoutDevice II (II) -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType /*device*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// PlayoutDeviceName -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::PlayoutDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) -{ - - if (0 != index) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Device index is out of range [0,0]"); - return -1; - } - - // Return empty string - memset(name, 0, kAdmMaxDeviceNameSize); - - if (guid) - { - memset(guid, 0, kAdmMaxGuidSize); - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// RecordingDeviceName -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::RecordingDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) -{ - - if (0 != index) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Device index is out of range [0,0]"); - return -1; - } - - // Return empty string - memset(name, 0, kAdmMaxDeviceNameSize); - - if (guid) - { - memset(guid, 0, kAdmMaxGuidSize); - } - - return 0; -} - -// 
---------------------------------------------------------------------------- -// RecordingDevices -// ---------------------------------------------------------------------------- - -int16_t AudioDeviceAndroidJni::RecordingDevices() -{ - - // There is one device only - return 1; -} - -// ---------------------------------------------------------------------------- -// SetRecordingDevice I (II) -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetRecordingDevice(uint16_t index) -{ - - if (_recIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Recording already initialized"); - return -1; - } - - // Recording device index will be used for specifying recording - // audio source, allow any value - _recAudioSource = index; - _recordingDeviceIsSpecified = true; - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetRecordingDevice II (II) -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType /*device*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// PlayoutIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::PlayoutIsAvailable(bool& available) -{ - - available = false; - - // Try to initialize the playout side - int32_t res = InitPlayout(); - - // Cancel effect of initialization - StopPlayout(); - - if (res != -1) - { - available = true; - } - - return res; -} - -// ---------------------------------------------------------------------------- -// RecordingIsAvailable -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::RecordingIsAvailable(bool& available) -{ - - available = false; - - // Try to initialize the recording side - int32_t res = InitRecording(); - - // Cancel effect of initialization - StopRecording(); - - if (res != -1) - { - available = true; - } - - return res; -} - -// ---------------------------------------------------------------------------- -// InitPlayout -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::InitPlayout() -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_initialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Not initialized"); - return -1; - } - - if (_playing) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Playout already started"); - return -1; - } - - if (!_playoutDeviceIsSpecified) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Playout device is not specified"); - return -1; - } - - if (_playIsInitialized) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - " Playout already initialized"); - return 0; - } - - // Initialize the speaker - if (InitSpeaker() == -1) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " InitSpeaker() failed"); - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "attaching"); - - // try to attach the thread and get the env - // Attach this thread to JVM - jint res =
_javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback", - "(I)I"); - int retVal = -1; - - // Call java sc object method - jint res = env->CallIntMethod(_javaScObj, initPlaybackID, _samplingFreqOut); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "InitPlayback failed (%d)", res); - } - else - { - // Set the audio device buffer sampling rate - _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut); - _playIsInitialized = true; - retVal = 0; - } - - // Detach this thread if it was attached - if (isAttached) - { - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "detaching"); - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Could not detach thread from JVM"); - } - } - - return retVal; -} - -// ---------------------------------------------------------------------------- -// InitRecording -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::InitRecording() -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_initialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Not initialized"); - return -1; - } - - if (_recording) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Recording already started"); - return -1; - } - - if (!_recordingDeviceIsSpecified) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Recording device is not specified"); - return -1; - } - - if (_recIsInitialized) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - " Recording already initialized"); - return 0; - } - - // Initialize the microphone - if (InitMicrophone() == -1) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " InitMicrophone() failed"); - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording", - "(II)I"); - int retVal = -1; - - // call java sc object method - jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource, - _samplingFreqIn); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "InitRecording failed (%d)", res); - } - else - { - // Set the audio device buffer sampling rate - _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn); - - // the init rec function returns a fixed delay - _delayRecording = (res * 1000) / _samplingFreqIn; - - _recIsInitialized = true; - retVal = 0; - } - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Could not detach thread from JVM"); - } - } - - return retVal; -} - -// ---------------------------------------------------------------------------- -// StartRecording -// 
---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StartRecording() -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_recIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Recording not initialized"); - return -1; - } - - if (_recording) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - " Recording already started"); - return 0; - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID startRecordingID = env->GetMethodID(_javaScClass, - "StartRecording", "()I"); - - // Call java sc object method - jint res = env->CallIntMethod(_javaScObj, startRecordingID); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "StartRecording failed (%d)", res); - return -1; - } - - _recWarning = 0; - _recError = 0; - - // Signal to recording thread that we want to start - _startRec = true; - _timeEventRec.Set(); // Release thread from waiting state - _critSect.Leave(); - // Wait for thread to init - if (kEventSignaled != _recStartStopEvent.Wait(5000)) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Timeout or error starting"); - } - _recStartStopEvent.Reset(); - _critSect.Enter(); - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Could not detach thread from JVM"); - } - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// StopRecording -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StopRecording() - -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_recIsInitialized) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - " Recording is not initialized"); - return 0; - } - - // make sure we don't start recording (it's asynchronous), - // assuming that we are under lock - _startRec = false; - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording", - "()I"); - - // Call java sc object method - jint res = env->CallIntMethod(_javaScObj, stopRecordingID); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "StopRecording failed (%d)", res); - } - - _recIsInitialized = false; - _recording = false; - _recWarning = 0; - _recError = 0; - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Could 
not detach thread from JVM"); - } - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// RecordingIsInitialized -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::RecordingIsInitialized() const -{ - - return _recIsInitialized; -} - -// ---------------------------------------------------------------------------- -// Recording -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::Recording() const -{ - - return _recording; -} - -// ---------------------------------------------------------------------------- -// PlayoutIsInitialized -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::PlayoutIsInitialized() const -{ - - return _playIsInitialized; -} - -// ---------------------------------------------------------------------------- -// StartPlayout -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StartPlayout() -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_playIsInitialized) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Playout not initialized"); - return -1; - } - - if (_playing) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - " Playout already started"); - return 0; - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID startPlaybackID = env->GetMethodID(_javaScClass, "StartPlayback", - "()I"); - - // Call java sc object method - jint res = env->CallIntMethod(_javaScObj, startPlaybackID); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "StartPlayback failed (%d)", res); - return -1; - } - - _playWarning = 0; - _playError = 0; - - // Signal to playout thread that we want to start - _startPlay = true; - _timeEventPlay.Set(); // Release thread from waiting state - _critSect.Leave(); - // Wait for thread to init - if (kEventSignaled != _playStartStopEvent.Wait(5000)) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Timeout or error starting"); - } - _playStartStopEvent.Reset(); - _critSect.Enter(); - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Could not detach thread from JVM"); - } - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// StopPlayout -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::StopPlayout() -{ - - CriticalSectionScoped lock(&_critSect); - - if (!_playIsInitialized) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - " Playout is not initialized"); - return 0; - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env 
- // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback", - "()I"); - - // Call java sc object method - jint res = env->CallIntMethod(_javaScObj, stopPlaybackID); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "StopPlayback failed (%d)", res); - } - - _playIsInitialized = false; - _playing = false; - _playWarning = 0; - _playError = 0; - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Could not detach thread from JVM"); - } - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// PlayoutDelay -// -// Remaining amount of data still in the playout buffer. -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::PlayoutDelay(uint16_t& delayMS) const -{ - delayMS = _delayPlayout; - - return 0; -} - -// ---------------------------------------------------------------------------- -// RecordingDelay -// -// Remaining amount of data still in the recording buffer. -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::RecordingDelay( - uint16_t& delayMS) const -{ - delayMS = _delayRecording; - - return 0; -} - -// ---------------------------------------------------------------------------- -// Playing -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::Playing() const -{ - - return _playing; -} - -// ---------------------------------------------------------------------------- -// SetPlayoutBuffer -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetPlayoutBuffer( - const AudioDeviceModule::BufferType /*type*/, - uint16_t /*sizeMS*/) -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// PlayoutBuffer -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::PlayoutBuffer( - AudioDeviceModule::BufferType& type, - uint16_t& sizeMS) const -{ - - type = AudioDeviceModule::kAdaptiveBufferSize; - sizeMS = _delayPlayout; // Set to current playout delay - - return 0; -} - -// ---------------------------------------------------------------------------- -// CPULoad -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::CPULoad(uint16_t& /*load*/) const -{ - - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " API call not supported on this platform"); - return -1; -} - -// ---------------------------------------------------------------------------- -// PlayoutWarning -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::PlayoutWarning() const -{ - return (_playWarning > 0); -} - -// ---------------------------------------------------------------------------- -// PlayoutError -// 
---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::PlayoutError() const -{ - return (_playError > 0); -} - -// ---------------------------------------------------------------------------- -// RecordingWarning -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::RecordingWarning() const -{ - return (_recWarning > 0); -} - -// ---------------------------------------------------------------------------- -// RecordingError -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::RecordingError() const -{ - return (_recError > 0); -} - -// ---------------------------------------------------------------------------- -// ClearPlayoutWarning -// ---------------------------------------------------------------------------- - -void AudioDeviceAndroidJni::ClearPlayoutWarning() -{ - _playWarning = 0; -} - -// ---------------------------------------------------------------------------- -// ClearPlayoutError -// ---------------------------------------------------------------------------- - -void AudioDeviceAndroidJni::ClearPlayoutError() -{ - _playError = 0; -} - -// ---------------------------------------------------------------------------- -// ClearRecordingWarning -// ---------------------------------------------------------------------------- - -void AudioDeviceAndroidJni::ClearRecordingWarning() -{ - _recWarning = 0; -} - -// ---------------------------------------------------------------------------- -// ClearRecordingError -// ---------------------------------------------------------------------------- - -void AudioDeviceAndroidJni::ClearRecordingError() -{ - _recError = 0; -} - -// ---------------------------------------------------------------------------- -// SetRecordingSampleRate -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetRecordingSampleRate( - const uint32_t samplesPerSec) -{ - - if (samplesPerSec > 48000 || samplesPerSec < 8000) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Invalid sample rate"); - return -1; - } - - // set the recording sample rate to use - _samplingFreqIn = samplesPerSec; - - // Update the AudioDeviceBuffer - _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec); - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetPlayoutSampleRate -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetPlayoutSampleRate( - const uint32_t samplesPerSec) -{ - - if (samplesPerSec > 48000 || samplesPerSec < 8000) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " Invalid sample rate"); - return -1; - } - - // set the playout sample rate to use - _samplingFreqOut = samplesPerSec; - - // Update the AudioDeviceBuffer - _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec); - - return 0; -} - -// ---------------------------------------------------------------------------- -// SetLoudspeakerStatus -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::SetLoudspeakerStatus(bool enable) -{ - - if (!globalContext) - { - WEBRTC_TRACE(kTraceError, kTraceUtility, -1, - " Context is not set"); - return -1; - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // 
try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceUtility, -1, - " Could not attach thread to JVM (%d, %p)", res, env); - return -1; - } - isAttached = true; - } - - // get the method ID - jmethodID setPlayoutSpeakerID = env->GetMethodID(_javaScClass, - "SetPlayoutSpeaker", - "(Z)I"); - - // call java sc object method - jint res = env->CallIntMethod(_javaScObj, setPlayoutSpeakerID, enable); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceUtility, -1, - " SetPlayoutSpeaker failed (%d)", res); - return -1; - } - - _loudSpeakerOn = enable; - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1, - " Could not detach thread from JVM"); - } - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// GetLoudspeakerStatus -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::GetLoudspeakerStatus(bool& enabled) const -{ - - enabled = _loudSpeakerOn; - - return 0; -} - -// ============================================================================ -// Private Methods -// ============================================================================ - - -// ---------------------------------------------------------------------------- -// InitJavaResources -// -// Initializes needed Java resources like the JNI interface to -// AudioDeviceAndroid.java -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::InitJavaResources() -{ - // todo: Check if we already have created the java object - _javaVM = globalJvm; - _javaScClass = globalScClass; - - // use the jvm that has been set - if (!_javaVM) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: Not a valid Java VM pointer", __FUNCTION__); - return -1; - } - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "get method id"); - - // get the method ID for the void(void) constructor - jmethodID cid = env->GetMethodID(_javaScClass, "<init>", "()V"); - if (cid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get constructor ID", __FUNCTION__); - return -1; /* exception thrown */ - } - - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "%s: construct object", __FUNCTION__); - - // construct the object - jobject javaScObjLocal = env->NewObject(_javaScClass, cid); - if (!javaScObjLocal) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - "%s: could not create Java sc object", __FUNCTION__); - return -1; - } - - // Create a reference to the object (to tell JNI that we are referencing it - // after this function has returned).
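// ----------------------------------------------------------------------------
// A minimal sketch of the JNI reference idiom relied on below (all names here
// are hypothetical): NewObject() returns a local reference that becomes
// invalid once the native frame returns, so a global reference must be taken
// before the jobject is cached, and released explicitly at teardown.
//
//   jobject localObj  = env->NewObject(clazz, ctorId);  // frame-scoped ref
//   jobject cachedObj = env->NewGlobalRef(localObj);    // valid across calls
//   env->DeleteLocalRef(localObj);                      // local ref not needed
//   // ... use cachedObj from any attached thread ...
//   env->DeleteGlobalRef(cachedObj);                    // manual release
// ----------------------------------------------------------------------------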
- _javaScObj = env->NewGlobalRef(javaScObjLocal); - if (!_javaScObj) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not create Java sc object reference", - __FUNCTION__); - return -1; - } - - // Delete local object ref, we only use the global ref. - env->DeleteLocalRef(javaScObjLocal); - - ////////////////////// - // AUDIO MANAGEMENT - - // This is not mandatory functionality - if (globalContext) { - jfieldID context_id = env->GetFieldID(globalScClass, - "_context", - "Landroid/content/Context;"); - if (!context_id) { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get _context id", __FUNCTION__); - return -1; - } - - env->SetObjectField(_javaScObj, context_id, globalContext); - jobject javaContext = env->GetObjectField(_javaScObj, context_id); - if (!javaContext) { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not set or get _context", __FUNCTION__); - return -1; - } - } - else { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - "%s: did not set Context - some functionality is not " - "supported", - __FUNCTION__); - } - - ///////////// - // PLAYOUT - - // Get play buffer field ID. - jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer", - "Ljava/nio/ByteBuffer;"); - if (!fidPlayBuffer) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get play buffer fid", __FUNCTION__); - return -1; - } - - // Get play buffer object. - jobject javaPlayBufferLocal = - env->GetObjectField(_javaScObj, fidPlayBuffer); - if (!javaPlayBufferLocal) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get play buffer", __FUNCTION__); - return -1; - } - - // Create a global reference to the object (to tell JNI that we are - // referencing it after this function has returned) - // NOTE: we are referencing it only through the direct buffer (see below). - _javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal); - if (!_javaPlayBuffer) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get play buffer reference", __FUNCTION__); - return -1; - } - - // Delete local object ref, we only use the global ref. - env->DeleteLocalRef(javaPlayBufferLocal); - - // Get direct buffer. - _javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer); - if (!_javaDirectPlayBuffer) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get direct play buffer", __FUNCTION__); - return -1; - } - - // Get the play audio method ID. - _javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I"); - if (!_javaMidPlayAudio) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get play audio mid", __FUNCTION__); - return -1; - } - - ////////////// - // RECORDING - - // Get rec buffer field ID. - jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer", - "Ljava/nio/ByteBuffer;"); - if (!fidRecBuffer) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get rec buffer fid", __FUNCTION__); - return -1; - } - - // Get rec buffer object. - jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer); - if (!javaRecBufferLocal) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get rec buffer", __FUNCTION__); - return -1; - } - - // Create a global reference to the object (to tell JNI that we are - // referencing it after this function has returned) - // NOTE: we are referencing it only through the direct buffer (see below). 
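// ----------------------------------------------------------------------------
// A minimal sketch of the direct-buffer sharing used for the play and rec
// buffers (hypothetical names): a java.nio.ByteBuffer created with
// allocateDirect() exposes its backing store to native code, letting Java and
// C++ exchange PCM samples without per-call copies.
//
//   jobject buf     = env->GetObjectField(obj, bufferFid); // the ByteBuffer
//   jobject bufRef  = env->NewGlobalRef(buf);              // pin for later use
//   void*   pcmData = env->GetDirectBufferAddress(bufRef); // NULL if the
//                                                          // buffer is not direct
// ----------------------------------------------------------------------------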
- _javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal); - if (!_javaRecBuffer) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get rec buffer reference", __FUNCTION__); - return -1; - } - - // Delete local object ref, we only use the global ref. - env->DeleteLocalRef(javaRecBufferLocal); - - // Get direct buffer. - _javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer); - if (!_javaDirectRecBuffer) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get direct rec buffer", __FUNCTION__); - return -1; - } - - // Get the rec audio method ID. - _javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I"); - if (!_javaMidRecAudio) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: could not get rec audio mid", __FUNCTION__); - return -1; - } - - // Detach this thread if it was attached. - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - - return 0; -} - -// ---------------------------------------------------------------------------- -// InitSampleRate -// -// checks supported sample rates for playback -// and recording and initializes the rates to be used -// Also stores the max playout volume returned from InitPlayout. -// ---------------------------------------------------------------------------- - -int32_t AudioDeviceAndroidJni::InitSampleRate() -{ - int samplingFreq = 44100; - jint res = 0; - - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - if (_samplingFreqIn > 0) - { - // read the configured sampling rate - samplingFreq = _samplingFreqIn; - WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, - " Trying configured recording sampling rate %d", - samplingFreq); - } - - // get the method ID - jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording", - "(II)I"); - - bool keepTrying = true; - while (keepTrying) - { - // call java sc object method - res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource, - samplingFreq); - if (res < 0) - { - switch (samplingFreq) - { - case 44100: - samplingFreq = 16000; - break; - case 16000: - samplingFreq = 8000; - break; - default: // error - WEBRTC_TRACE(kTraceError, - kTraceAudioDevice, _id, - "%s: InitRecording failed (%d)", __FUNCTION__, - res); - return -1; - } - } - else - { - keepTrying = false; - } - } - - // set the recording sample rate to use - _samplingFreqIn = samplingFreq; - - WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, - "Recording sample rate set to (%d)", _samplingFreqIn); - - // get the method ID - jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording", - "()I"); - - // Call java sc object method - res = env->CallIntMethod(_javaScObj, stopRecordingID); - if (res < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - "StopRecording failed (%d)", res); - } - - // get the method ID - jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback", - "(I)I"); - - if 
(_samplingFreqOut > 0) - { - // read the configured sampling rate - samplingFreq = _samplingFreqOut; - WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, - " Trying configured playback sampling rate %d", - samplingFreq); - } - else - { - // set the preferred sampling frequency - if (samplingFreq == 8000) - { - // try 16000 - samplingFreq = 16000; - } - // else use same as recording - } - - keepTrying = true; - while (keepTrying) - { - // call java sc object method - res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq); - if (res < 0) - { - switch (samplingFreq) - { - case 44100: - samplingFreq = 16000; - break; - case 16000: - samplingFreq = 8000; - break; - default: // error - WEBRTC_TRACE(kTraceError, - kTraceAudioDevice, _id, - "InitPlayback failed (%d)", res); - return -1; - } - } - else - { - keepTrying = false; - } - } - - // Store max playout volume - _maxSpeakerVolume = static_cast<uint32_t> (res); - if (_maxSpeakerVolume < 1) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - " Did not get valid max speaker volume value (%d)", - _maxSpeakerVolume); - } - - // set the playback sample rate to use - _samplingFreqOut = samplingFreq; - WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, - "Playback sample rate set to (%d)", _samplingFreqOut); - - // get the method ID - jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback", - "()I"); - - // Call java sc object method - res = env->CallIntMethod(_javaScObj, stopPlaybackID); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "StopPlayback failed (%d)", res); - } - - // Detach this thread if it was attached - if (isAttached) - { - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - - return 0; -} - -// ============================================================================ -// Thread Methods -// ============================================================================ - -// ---------------------------------------------------------------------------- -// PlayThreadFunc -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::PlayThreadFunc(void* pThis) -{ - return (static_cast<AudioDeviceAndroidJni*> (pThis)->PlayThreadProcess()); -} - -// ---------------------------------------------------------------------------- -// RecThreadFunc -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::RecThreadFunc(void* pThis) -{ - return (static_cast<AudioDeviceAndroidJni*> (pThis)->RecThreadProcess()); -} - -// ---------------------------------------------------------------------------- -// PlayThreadProcess -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::PlayThreadProcess() -{ - if (!_playThreadIsInitialized) - { - // Do once when thread is started - - // Attach this thread to JVM and get the JNI env for this thread - jint res = _javaVM->AttachCurrentThread(&_jniEnvPlay, NULL); - if ((res < 0) || !_jniEnvPlay) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, - _id, - "Could not attach playout thread to JVM (%d, %p)", - res, _jniEnvPlay); - return false; // Close down thread - } - - _playThreadIsInitialized = true; - } - - if (!_playing) - { - switch (_timeEventPlay.Wait(1000)) - { - case kEventSignaled: - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, - _id, "Playout thread event signal"); - _timeEventPlay.Reset(); - break; - case kEventError: -
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, - _id, "Playout thread event error"); - return true; - case kEventTimeout: - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, - _id, "Playout thread event timeout"); - return true; - } - } - - Lock(); - - if (_startPlay) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - "_startPlay true, performing initial actions"); - _startPlay = false; - _playing = true; - _playWarning = 0; - _playError = 0; - _playStartStopEvent.Set(); - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "Sent signal"); - } - - if (_playing) - { - int8_t playBuffer[2 * 480]; // Max 10 ms @ 48 kHz / 16 bit - uint32_t samplesToPlay = _samplingFreqOut / 100; - - // ask for new PCM data to be played out using the AudioDeviceBuffer - // ensure that this callback is executed without taking the - // audio-thread lock - UnLock(); - uint32_t nSamples = - _ptrAudioBuffer->RequestPlayoutData(samplesToPlay); - Lock(); - - // Check again since play may have stopped during unlocked period - if (!_playing) - { - UnLock(); - return true; - } - - nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer); - if (nSamples != samplesToPlay) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - " invalid number of output samples(%d)", nSamples); - _playWarning = 1; - } - - // Copy data to our direct buffer (held by java sc object) - // todo: Give _javaDirectPlayBuffer directly to VoE? - memcpy(_javaDirectPlayBuffer, playBuffer, nSamples * 2); - - UnLock(); - - // Call java sc object method to process data in direct buffer - // Will block until data has been put in OS playout buffer - // (see java sc class) - jint res = _jniEnvPlay->CallIntMethod(_javaScObj, _javaMidPlayAudio, - 2 * nSamples); - if (res < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "PlayAudio failed (%d)", res); - _playWarning = 1; - } - else if (res > 0) - { - // we are not recording and have got a delay value from playback - _delayPlayout = (res * 1000) / _samplingFreqOut; - } - // If 0 is returned we are recording and then play delay is updated - // in RecordProcess - - Lock(); - - } // _playing - - if (_shutdownPlayThread) - { - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "Detaching thread from Java VM"); - - // Detach thread from Java VM - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, - _id, "Could not detach playout thread from JVM"); - _shutdownPlayThread = false; - // If we say OK (i.e. 
set event) and close thread anyway, - // app will crash - } - else - { - _jniEnvPlay = NULL; - _shutdownPlayThread = false; - _playStartStopEvent.Set(); // Signal to Terminate() that we are done - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "Sent signal"); - } - } - - UnLock(); - return true; -} - -// ---------------------------------------------------------------------------- -// RecThreadProcess -// ---------------------------------------------------------------------------- - -bool AudioDeviceAndroidJni::RecThreadProcess() -{ - if (!_recThreadIsInitialized) - { - // Do once when thread is started - - // Attach this thread to JVM - jint res = _javaVM->AttachCurrentThread(&_jniEnvRec, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !_jniEnvRec) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, - _id, "Could not attach rec thread to JVM (%d, %p)", - res, _jniEnvRec); - return false; // Close down thread - } - - _recThreadIsInitialized = true; - } - - // just sleep if rec has not started - if (!_recording) - { - switch (_timeEventRec.Wait(1000)) - { - case kEventSignaled: - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, - _id, "Recording thread event signal"); - _timeEventRec.Reset(); - break; - case kEventError: - WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, - _id, "Recording thread event error"); - return true; - case kEventTimeout: - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, - _id, "Recording thread event timeout"); - return true; - } - } - - Lock(); - - if (_startRec) - { - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - "_startRec true, performing initial actions"); - _startRec = false; - _recording = true; - _recWarning = 0; - _recError = 0; - _recStartStopEvent.Set(); - } - - if (_recording) - { - uint32_t samplesToRec = _samplingFreqIn / 100; - - // Call java sc object method to record data to direct buffer - // Will block until data has been recorded (see java sc class), - // therefore we must release the lock - UnLock(); - jint playDelayInSamples = _jniEnvRec->CallIntMethod(_javaScObj, - _javaMidRecAudio, - 2 * samplesToRec); - if (playDelayInSamples < 0) - { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, - "RecordAudio failed"); - _recWarning = 1; - } - else - { - _delayPlayout = (playDelayInSamples * 1000) / _samplingFreqOut; - } - Lock(); - - // Check again since recording may have stopped during Java call - if (_recording) - { -// WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, -// "total delay is %d", msPlayDelay + _delayRecording); - - // Copy data to our direct buffer (held by java sc object) - // todo: Give _javaDirectRecBuffer directly to VoE? - // todo: Check count <= 480 ? - memcpy(_recBuffer, _javaDirectRecBuffer, 2 * samplesToRec); - - // store the recorded buffer (no action will be taken if the - // #recorded samples is not a full buffer) - _ptrAudioBuffer->SetRecordedBuffer(_recBuffer, samplesToRec); - - // store vqe delay values - _ptrAudioBuffer->SetVQEData(_delayPlayout, _delayRecording, 0); - - // deliver recorded samples at specified sample rate, mic level - // etc. 
to the observer using callback - UnLock(); - _ptrAudioBuffer->DeliverRecordedData(); - Lock(); - } - - } // _recording - - if (_shutdownRecThread) - { - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "Detaching rec thread from Java VM"); - - // Detach thread from Java VM - if (_javaVM->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, - _id, "Could not detach recording thread from JVM"); - _shutdownRecThread = false; - // If we say OK (i.e. set event) and close thread anyway, - // app will crash - } - else - { - _jniEnvRec = NULL; - _shutdownRecThread = false; - _recStartStopEvent.Set(); // Signal to Terminate() that we are done - - WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "Sent signal rec"); - } - } - - UnLock(); - return true; -} - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h deleted file mode 100644 index 7eb57cbd7f37..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * Android audio device interface (JNI/AudioTrack/AudioRecord usage) - */ - -#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_JNI_ANDROID_H -#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_JNI_ANDROID_H - -#include "webrtc/modules/audio_device/audio_device_generic.h" -#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" - -#include <jni.h> // For accessing AudioDeviceAndroid java class - -#define AudioCaptureClass "org/webrtc/voiceengine/WebRTCAudioDevice" - -namespace webrtc -{ -class EventWrapper; - -class ThreadWrapper; - -class AudioDeviceAndroidJni : public AudioDeviceGeneric { - public: - AudioDeviceAndroidJni(const int32_t id); - ~AudioDeviceAndroidJni(); - - static int32_t SetAndroidAudioDeviceObjects(void* javaVM, - void* context); - - static int32_t SetAndroidAudioDeviceObjects(void* javaVM, - void* env, - void* context); - - virtual int32_t ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const; - - virtual int32_t Init(); - virtual int32_t Terminate(); - virtual bool Initialized() const; - - virtual int16_t PlayoutDevices(); - virtual int16_t RecordingDevices(); - virtual int32_t PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]); - virtual int32_t RecordingDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]); - - virtual int32_t SetPlayoutDevice(uint16_t index); - virtual int32_t SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device); - virtual int32_t SetRecordingDevice(uint16_t index); - virtual int32_t SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device); - - virtual int32_t PlayoutIsAvailable(bool& available); - virtual int32_t InitPlayout(); - virtual bool PlayoutIsInitialized() const; - virtual int32_t RecordingIsAvailable(bool& available); - virtual int32_t InitRecording(); - virtual bool RecordingIsInitialized() const; - - virtual int32_t StartPlayout(); - virtual int32_t
StopPlayout(); - virtual bool Playing() const; - virtual int32_t StartRecording(); - virtual int32_t StopRecording(); - virtual bool Recording() const; - - virtual int32_t SetAGC(bool enable); - virtual bool AGC() const; - - virtual int32_t SetWaveOutVolume(uint16_t volumeLeft, uint16_t volumeRight); - virtual int32_t WaveOutVolume(uint16_t& volumeLeft, - uint16_t& volumeRight) const; - - virtual int32_t SpeakerIsAvailable(bool& available); - virtual int32_t InitSpeaker(); - virtual bool SpeakerIsInitialized() const; - virtual int32_t MicrophoneIsAvailable(bool& available); - virtual int32_t InitMicrophone(); - virtual bool MicrophoneIsInitialized() const; - - virtual int32_t SpeakerVolumeIsAvailable(bool& available); - virtual int32_t SetSpeakerVolume(uint32_t volume); - virtual int32_t SpeakerVolume(uint32_t& volume) const; - virtual int32_t MaxSpeakerVolume(uint32_t& maxVolume) const; - virtual int32_t MinSpeakerVolume(uint32_t& minVolume) const; - virtual int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const; - - virtual int32_t MicrophoneVolumeIsAvailable(bool& available); - virtual int32_t SetMicrophoneVolume(uint32_t volume); - virtual int32_t MicrophoneVolume(uint32_t& volume) const; - virtual int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const; - virtual int32_t MinMicrophoneVolume(uint32_t& minVolume) const; - virtual int32_t MicrophoneVolumeStepSize( - uint16_t& stepSize) const; - - virtual int32_t SpeakerMuteIsAvailable(bool& available); - virtual int32_t SetSpeakerMute(bool enable); - virtual int32_t SpeakerMute(bool& enabled) const; - - virtual int32_t MicrophoneMuteIsAvailable(bool& available); - virtual int32_t SetMicrophoneMute(bool enable); - virtual int32_t MicrophoneMute(bool& enabled) const; - - virtual int32_t MicrophoneBoostIsAvailable(bool& available); - virtual int32_t SetMicrophoneBoost(bool enable); - virtual int32_t MicrophoneBoost(bool& enabled) const; - - virtual int32_t StereoPlayoutIsAvailable(bool& available); - virtual int32_t SetStereoPlayout(bool enable); - virtual int32_t StereoPlayout(bool& enabled) const; - virtual int32_t StereoRecordingIsAvailable(bool& available); - virtual int32_t SetStereoRecording(bool enable); - virtual int32_t StereoRecording(bool& enabled) const; - - virtual int32_t SetPlayoutBuffer( - const AudioDeviceModule::BufferType type, uint16_t sizeMS); - virtual int32_t PlayoutBuffer( - AudioDeviceModule::BufferType& type, uint16_t& sizeMS) const; - virtual int32_t PlayoutDelay(uint16_t& delayMS) const; - virtual int32_t RecordingDelay(uint16_t& delayMS) const; - - virtual int32_t CPULoad(uint16_t& load) const; - - virtual bool PlayoutWarning() const; - virtual bool PlayoutError() const; - virtual bool RecordingWarning() const; - virtual bool RecordingError() const; - virtual void ClearPlayoutWarning(); - virtual void ClearPlayoutError(); - virtual void ClearRecordingWarning(); - virtual void ClearRecordingError(); - - virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - virtual int32_t SetRecordingSampleRate( - const uint32_t samplesPerSec); - virtual int32_t SetPlayoutSampleRate( - const uint32_t samplesPerSec); - - virtual int32_t SetLoudspeakerStatus(bool enable); - virtual int32_t GetLoudspeakerStatus(bool& enable) const; - - static const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz - static const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz - - static const uint32_t N_REC_CHANNELS = 1; // default is mono recording - static const uint32_t N_PLAY_CHANNELS = 1; // default is 
mono playout - - static const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz - - private: - // Lock - void Lock() { - _critSect.Enter(); - }; - void UnLock() { - _critSect.Leave(); - }; - - // Init - int32_t InitJavaResources(); - int32_t InitSampleRate(); - - // Threads - static bool RecThreadFunc(void*); - static bool PlayThreadFunc(void*); - bool RecThreadProcess(); - bool PlayThreadProcess(); - - // Misc - AudioDeviceBuffer* _ptrAudioBuffer; - CriticalSectionWrapper& _critSect; - int32_t _id; - - // Events - EventWrapper& _timeEventRec; - EventWrapper& _timeEventPlay; - EventWrapper& _recStartStopEvent; - EventWrapper& _playStartStopEvent; - - // Threads - ThreadWrapper* _ptrThreadPlay; - ThreadWrapper* _ptrThreadRec; - uint32_t _recThreadID; - uint32_t _playThreadID; - bool _playThreadIsInitialized; - bool _recThreadIsInitialized; - bool _shutdownPlayThread; - bool _shutdownRecThread; - - // Rec buffer - int8_t _recBuffer[2 * REC_BUF_SIZE_IN_SAMPLES]; - - // States - bool _recordingDeviceIsSpecified; - bool _playoutDeviceIsSpecified; - bool _initialized; - bool _recording; - bool _playing; - bool _recIsInitialized; - bool _playIsInitialized; - bool _micIsInitialized; - bool _speakerIsInitialized; - - // Signal flags to threads - bool _startRec; - bool _startPlay; - - // Warnings and errors - uint16_t _playWarning; - uint16_t _playError; - uint16_t _recWarning; - uint16_t _recError; - - // Delay - uint16_t _delayPlayout; - uint16_t _delayRecording; - - // AGC state - bool _AGC; - - // Stored device properties - uint16_t _samplingFreqIn; // Sampling frequency for Mic - uint16_t _samplingFreqOut; // Sampling frequency for Speaker - uint32_t _maxSpeakerVolume; // The maximum speaker volume value - bool _loudSpeakerOn; - // Stores the desired audio source to use, set in SetRecordingDevice - int _recAudioSource; - - // JNI and Java - JavaVM* _javaVM; // denotes a Java VM - - JNIEnv* _jniEnvPlay; // The JNI env for playout thread - JNIEnv* _jniEnvRec; // The JNI env for recording thread - - jclass _javaScClass; // AudioDeviceAndroid class - jobject _javaScObj; // AudioDeviceAndroid object - - // The play buffer field in AudioDeviceAndroid object (global ref) - jobject _javaPlayBuffer; - // The rec buffer field in AudioDeviceAndroid object (global ref) - jobject _javaRecBuffer; - void* _javaDirectPlayBuffer; // Direct buffer pointer to play buffer - void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer - jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid - jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid - - // TODO(leozwang): Android holds only one JVM, all this jni handling - // will be consolidated into a single place to make it consistent and - // reliable. Chromium has a good example at base/android. - static JavaVM* globalJvm; - static JNIEnv* globalJNIEnv; - static jobject globalContext; - static jclass globalScClass; -}; - -} // namespace webrtc - -#endif // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_JNI_ANDROID_H diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc deleted file mode 100644 index 989c23427e5a..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc +++ /dev/null @@ -1,535 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/modules/audio_device/android/audio_device_opensles_android.h" - -#include "webrtc/modules/audio_device/android/opensles_input.h" -#include "webrtc/modules/audio_device/android/opensles_output.h" - -namespace webrtc { - -AudioDeviceAndroidOpenSLES::AudioDeviceAndroidOpenSLES(const int32_t id) -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - : output_(id), - input_(id, &output_) -#else - : input_(id, 0) -#endif -{ -} - -AudioDeviceAndroidOpenSLES::~AudioDeviceAndroidOpenSLES() { -} - -int32_t AudioDeviceAndroidOpenSLES::ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const { // NOLINT - return 0; -} - -int32_t AudioDeviceAndroidOpenSLES::Init() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.Init() | input_.Init(); -#else - return input_.Init(); -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::Terminate() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.Terminate() | input_.Terminate(); -#else - return input_.Terminate(); -#endif -} - -bool AudioDeviceAndroidOpenSLES::Initialized() const { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.Initialized() && input_.Initialized(); -#else - return input_.Initialized(); -#endif -} - -int16_t AudioDeviceAndroidOpenSLES::PlayoutDevices() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutDevices(); -#else - return 0; -#endif -} - -int16_t AudioDeviceAndroidOpenSLES::RecordingDevices() { - return input_.RecordingDevices(); -} - -int32_t AudioDeviceAndroidOpenSLES::PlayoutDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutDeviceName(index, name, guid); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::RecordingDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { - return input_.RecordingDeviceName(index, name, guid); -} - -int32_t AudioDeviceAndroidOpenSLES::SetPlayoutDevice(uint16_t index) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetPlayoutDevice(index); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetPlayoutDevice(device); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SetRecordingDevice(uint16_t index) { - return input_.SetRecordingDevice(index); -} - -int32_t AudioDeviceAndroidOpenSLES::SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device) { - return input_.SetRecordingDevice(device); -} - -int32_t AudioDeviceAndroidOpenSLES::PlayoutIsAvailable( - bool& available) { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutIsAvailable(available); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::InitPlayout() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.InitPlayout(); -#else - return -1; -#endif -} - -bool AudioDeviceAndroidOpenSLES::PlayoutIsInitialized() const { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutIsInitialized(); -#else - return false; -#endif -} - -int32_t 
AudioDeviceAndroidOpenSLES::RecordingIsAvailable( - bool& available) { // NOLINT - return input_.RecordingIsAvailable(available); -} - -int32_t AudioDeviceAndroidOpenSLES::InitRecording() { - return input_.InitRecording(); -} - -bool AudioDeviceAndroidOpenSLES::RecordingIsInitialized() const { - return input_.RecordingIsInitialized(); -} - -int32_t AudioDeviceAndroidOpenSLES::StartPlayout() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.StartPlayout(); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::StopPlayout() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.StopPlayout(); -#else - return -1; -#endif -} - -bool AudioDeviceAndroidOpenSLES::Playing() const { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.Playing(); -#else - return false; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::StartRecording() { - return input_.StartRecording(); -} - -int32_t AudioDeviceAndroidOpenSLES::StopRecording() { - return input_.StopRecording(); -} - -bool AudioDeviceAndroidOpenSLES::Recording() const { - return input_.Recording() ; -} - -int32_t AudioDeviceAndroidOpenSLES::SetAGC(bool enable) { - return input_.SetAGC(enable); -} - -bool AudioDeviceAndroidOpenSLES::AGC() const { - return input_.AGC(); -} - -int32_t AudioDeviceAndroidOpenSLES::SetWaveOutVolume(uint16_t volumeLeft, - uint16_t volumeRight) { - return -1; -} - -int32_t AudioDeviceAndroidOpenSLES::WaveOutVolume( - uint16_t& volumeLeft, // NOLINT - uint16_t& volumeRight) const { // NOLINT - return -1; -} - -int32_t AudioDeviceAndroidOpenSLES::SpeakerIsAvailable( - bool& available) { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerIsAvailable(available); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::InitSpeaker() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.InitSpeaker(); -#else - return -1; -#endif -} - -bool AudioDeviceAndroidOpenSLES::SpeakerIsInitialized() const { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerIsInitialized(); -#else - return false; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneIsAvailable( - bool& available) { // NOLINT - return input_.MicrophoneIsAvailable(available); -} - -int32_t AudioDeviceAndroidOpenSLES::InitMicrophone() { - return input_.InitMicrophone(); -} - -bool AudioDeviceAndroidOpenSLES::MicrophoneIsInitialized() const { - return input_.MicrophoneIsInitialized(); -} - -int32_t AudioDeviceAndroidOpenSLES::SpeakerVolumeIsAvailable( - bool& available) { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerVolumeIsAvailable(available); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SetSpeakerVolume(uint32_t volume) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetSpeakerVolume(volume); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SpeakerVolume( - uint32_t& volume) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerVolume(volume); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::MaxSpeakerVolume( - uint32_t& maxVolume) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.MaxSpeakerVolume(maxVolume); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::MinSpeakerVolume( - uint32_t& minVolume) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.MinSpeakerVolume(minVolume); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SpeakerVolumeStepSize( - uint16_t& stepSize) 
const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerVolumeStepSize(stepSize); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneVolumeIsAvailable( - bool& available) { // NOLINT - return input_.MicrophoneVolumeIsAvailable(available); -} - -int32_t AudioDeviceAndroidOpenSLES::SetMicrophoneVolume(uint32_t volume) { - return input_.SetMicrophoneVolume(volume); -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneVolume( - uint32_t& volume) const { // NOLINT - return input_.MicrophoneVolume(volume); -} - -int32_t AudioDeviceAndroidOpenSLES::MaxMicrophoneVolume( - uint32_t& maxVolume) const { // NOLINT - return input_.MaxMicrophoneVolume(maxVolume); -} - -int32_t AudioDeviceAndroidOpenSLES::MinMicrophoneVolume( - uint32_t& minVolume) const { // NOLINT - return input_.MinMicrophoneVolume(minVolume); -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneVolumeStepSize( - uint16_t& stepSize) const { // NOLINT - return input_.MicrophoneVolumeStepSize(stepSize); -} - -int32_t AudioDeviceAndroidOpenSLES::SpeakerMuteIsAvailable( - bool& available) { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerMuteIsAvailable(available); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SetSpeakerMute(bool enable) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetSpeakerMute(enable); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SpeakerMute( - bool& enabled) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SpeakerMute(enabled); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneMuteIsAvailable( - bool& available) { // NOLINT - return input_.MicrophoneMuteIsAvailable(available); -} - -int32_t AudioDeviceAndroidOpenSLES::SetMicrophoneMute(bool enable) { - return input_.SetMicrophoneMute(enable); -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneMute( - bool& enabled) const { // NOLINT - return input_.MicrophoneMute(enabled); -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneBoostIsAvailable( - bool& available) { // NOLINT - return input_.MicrophoneBoostIsAvailable(available); -} - -int32_t AudioDeviceAndroidOpenSLES::SetMicrophoneBoost(bool enable) { - return input_.SetMicrophoneBoost(enable); -} - -int32_t AudioDeviceAndroidOpenSLES::MicrophoneBoost( - bool& enabled) const { // NOLINT - return input_.MicrophoneBoost(enabled); -} - -int32_t AudioDeviceAndroidOpenSLES::StereoPlayoutIsAvailable( - bool& available) { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.StereoPlayoutIsAvailable(available); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::SetStereoPlayout(bool enable) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetStereoPlayout(enable); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::StereoPlayout( - bool& enabled) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.StereoPlayout(enabled); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::StereoRecordingIsAvailable( - bool& available) { // NOLINT - return input_.StereoRecordingIsAvailable(available); -} - -int32_t AudioDeviceAndroidOpenSLES::SetStereoRecording(bool enable) { - return input_.SetStereoRecording(enable); -} - -int32_t AudioDeviceAndroidOpenSLES::StereoRecording( - bool& enabled) const { // NOLINT - return input_.StereoRecording(enabled); -} - -int32_t AudioDeviceAndroidOpenSLES::SetPlayoutBuffer( - const AudioDeviceModule::BufferType type, - 
uint16_t sizeMS) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetPlayoutBuffer(type, sizeMS); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::PlayoutBuffer( - AudioDeviceModule::BufferType& type, - uint16_t& sizeMS) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutBuffer(type, sizeMS); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::PlayoutDelay( - uint16_t& delayMS) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutDelay(delayMS); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::RecordingDelay( - uint16_t& delayMS) const { // NOLINT - return input_.RecordingDelay(delayMS); -} - -int32_t AudioDeviceAndroidOpenSLES::CPULoad( - uint16_t& load) const { // NOLINT - return -1; -} - -bool AudioDeviceAndroidOpenSLES::PlayoutWarning() const { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutWarning(); -#else - return false; -#endif -} - -bool AudioDeviceAndroidOpenSLES::PlayoutError() const { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.PlayoutError(); -#else - return false; -#endif -} - -bool AudioDeviceAndroidOpenSLES::RecordingWarning() const { - return input_.RecordingWarning(); -} - -bool AudioDeviceAndroidOpenSLES::RecordingError() const { - return input_.RecordingError(); -} - -void AudioDeviceAndroidOpenSLES::ClearPlayoutWarning() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.ClearPlayoutWarning(); -#else - return; -#endif -} - -void AudioDeviceAndroidOpenSLES::ClearPlayoutError() { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.ClearPlayoutError(); -#else - return; -#endif -} - -void AudioDeviceAndroidOpenSLES::ClearRecordingWarning() { - return input_.ClearRecordingWarning(); -} - -void AudioDeviceAndroidOpenSLES::ClearRecordingError() { - return input_.ClearRecordingError(); -} - -void AudioDeviceAndroidOpenSLES::AttachAudioBuffer( - AudioDeviceBuffer* audioBuffer) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - output_.AttachAudioBuffer(audioBuffer); -#endif - input_.AttachAudioBuffer(audioBuffer); -} - -int32_t AudioDeviceAndroidOpenSLES::SetLoudspeakerStatus(bool enable) { -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.SetLoudspeakerStatus(enable); -#else - return -1; -#endif -} - -int32_t AudioDeviceAndroidOpenSLES::GetLoudspeakerStatus( - bool& enable) const { // NOLINT -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - return output_.GetLoudspeakerStatus(enable); -#else - return -1; -#endif -} - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h deleted file mode 100644 index 7564da05c832..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_OPENSLES_ANDROID_H_
-#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_OPENSLES_ANDROID_H_
-
-#include "webrtc/modules/audio_device/audio_device_generic.h"
-#include "webrtc/modules/audio_device/android/opensles_input.h"
-#include "webrtc/modules/audio_device/android/opensles_output.h"
-
-namespace webrtc {
-
-// Implements the interface of AudioDeviceGeneric. OpenSlesOutput and
-// OpenSlesInput are the implementations.
-class AudioDeviceAndroidOpenSLES : public AudioDeviceGeneric {
- public:
-  explicit AudioDeviceAndroidOpenSLES(const int32_t id);
-  virtual ~AudioDeviceAndroidOpenSLES();
-
-  // Retrieve the currently utilized audio layer
-  virtual int32_t ActiveAudioLayer(
-      AudioDeviceModule::AudioLayer& audioLayer) const;  // NOLINT
-
-  // Main initialization and termination
-  virtual int32_t Init();
-  virtual int32_t Terminate();
-  virtual bool Initialized() const;
-
-  // Device enumeration
-  virtual int16_t PlayoutDevices();
-  virtual int16_t RecordingDevices();
-  virtual int32_t PlayoutDeviceName(uint16_t index,
-                                    char name[kAdmMaxDeviceNameSize],
-                                    char guid[kAdmMaxGuidSize]);
-  virtual int32_t RecordingDeviceName(uint16_t index,
-                                      char name[kAdmMaxDeviceNameSize],
-                                      char guid[kAdmMaxGuidSize]);
-
-  // Device selection
-  virtual int32_t SetPlayoutDevice(uint16_t index);
-  virtual int32_t SetPlayoutDevice(
-      AudioDeviceModule::WindowsDeviceType device);
-  virtual int32_t SetRecordingDevice(uint16_t index);
-  virtual int32_t SetRecordingDevice(
-      AudioDeviceModule::WindowsDeviceType device);
-
-  // Audio transport initialization
-  virtual int32_t PlayoutIsAvailable(bool& available);
-  virtual int32_t InitPlayout();
-  virtual bool PlayoutIsInitialized() const;
-  virtual int32_t RecordingIsAvailable(bool& available);
-  virtual int32_t InitRecording();
-  virtual bool RecordingIsInitialized() const;
-
-  // Audio transport control
-  virtual int32_t StartPlayout();
-  virtual int32_t StopPlayout();
-  virtual bool Playing() const;
-  virtual int32_t StartRecording();
-  virtual int32_t StopRecording();
-  virtual bool Recording() const;
-
-  // Microphone Automatic Gain Control (AGC)
-  virtual int32_t SetAGC(bool enable);
-  virtual bool AGC() const;
-
-  // Volume control based on the Windows Wave API (Windows only)
-  virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
-                                   uint16_t volumeRight);
-  virtual int32_t WaveOutVolume(
-      uint16_t& volumeLeft,  // NOLINT
-      uint16_t& volumeRight) const;  // NOLINT
-
-  // Audio mixer initialization
-  virtual int32_t SpeakerIsAvailable(bool& available);
-  virtual int32_t InitSpeaker();
-  virtual bool SpeakerIsInitialized() const;
-  virtual int32_t MicrophoneIsAvailable(bool& available);
-  virtual int32_t InitMicrophone();
-  virtual bool MicrophoneIsInitialized() const;
-
-  // Speaker volume controls
-  virtual int32_t SpeakerVolumeIsAvailable(bool& available);
-  virtual int32_t SetSpeakerVolume(uint32_t volume);
-  virtual int32_t SpeakerVolume(uint32_t& volume) const;
-  virtual int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
-  virtual int32_t MinSpeakerVolume(uint32_t& minVolume) const;
-  virtual int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const;
-
-  // Microphone volume controls
-  virtual int32_t MicrophoneVolumeIsAvailable(bool& available);
-  virtual int32_t SetMicrophoneVolume(uint32_t volume);
-  virtual int32_t MicrophoneVolume(uint32_t& volume) const;
-  virtual int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
-  virtual int32_t MinMicrophoneVolume(uint32_t& minVolume)
const; - virtual int32_t MicrophoneVolumeStepSize( - uint16_t& stepSize) const; - - // Speaker mute control - virtual int32_t SpeakerMuteIsAvailable(bool& available); - virtual int32_t SetSpeakerMute(bool enable); - virtual int32_t SpeakerMute(bool& enabled) const; - - // Microphone mute control - virtual int32_t MicrophoneMuteIsAvailable(bool& available); - virtual int32_t SetMicrophoneMute(bool enable); - virtual int32_t MicrophoneMute(bool& enabled) const; - - // Microphone boost control - virtual int32_t MicrophoneBoostIsAvailable(bool& available); - virtual int32_t SetMicrophoneBoost(bool enable); - virtual int32_t MicrophoneBoost(bool& enabled) const; - - // Stereo support - virtual int32_t StereoPlayoutIsAvailable(bool& available); - virtual int32_t SetStereoPlayout(bool enable); - virtual int32_t StereoPlayout(bool& enabled) const; - virtual int32_t StereoRecordingIsAvailable(bool& available); - virtual int32_t SetStereoRecording(bool enable); - virtual int32_t StereoRecording(bool& enabled) const; - - // Delay information and control - virtual int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type, - uint16_t sizeMS); - virtual int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type, - uint16_t& sizeMS) const; - virtual int32_t PlayoutDelay(uint16_t& delayMS) const; - virtual int32_t RecordingDelay(uint16_t& delayMS) const; - - // CPU load - virtual int32_t CPULoad(uint16_t& load) const; // NOLINT - - // Error and warning information - virtual bool PlayoutWarning() const; - virtual bool PlayoutError() const; - virtual bool RecordingWarning() const; - virtual bool RecordingError() const; - virtual void ClearPlayoutWarning(); - virtual void ClearPlayoutError(); - virtual void ClearRecordingWarning(); - virtual void ClearRecordingError(); - - // Attach audio buffer - virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - // Speaker audio routing - virtual int32_t SetLoudspeakerStatus(bool enable); - virtual int32_t GetLoudspeakerStatus(bool& enable) const; - - private: -#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT - OpenSlesOutput output_; -#endif - OpenSlesInput input_; -}; - -} // namespace webrtc - -#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_OPENSLES_ANDROID_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_template.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_template.h new file mode 100644 index 000000000000..83fdea9a7963 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_template.h @@ -0,0 +1,425 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ +#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ + +#include "webrtc/modules/audio_device/audio_device_generic.h" + +#include "webrtc/system_wrappers/interface/trace.h" + +namespace webrtc { + +// InputType/OutputType can be any class that implements the capturing/rendering +// part of the AudioDeviceGeneric API. 
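Assuming the template that follows is declared as template <class InputType, class OutputType> (consistent with the comment above and with the output_/input_ members below), a typical combination of the two halves looks like this; the typedef name and the OpenSles pairing are illustrative assumptions, not part of this patch:

    // Sketch: composing a capture half and a render half into one
    // AudioDeviceGeneric implementation.
    typedef AudioDeviceTemplate<OpenSlesInput, OpenSlesOutput>
        OpenSlesAudioDevice;

    AudioDeviceGeneric* CreateAndroidAudioDevice(int32_t id) {
      // The template ctor wires input_(id, &output_), so the input half can
      // query the output half for playout delay.
      return new OpenSlesAudioDevice(id);
    }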
+template +class AudioDeviceTemplate : public AudioDeviceGeneric { + public: + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* env, + void* context) { + if (OutputType::SetAndroidAudioDeviceObjects(javaVM, env, context) == -1) { + return -1; + } + return InputType::SetAndroidAudioDeviceObjects(javaVM, env, context); + } + + static void ClearAndroidAudioDeviceObjects() { + OutputType::ClearAndroidAudioDeviceObjects(); + InputType::ClearAndroidAudioDeviceObjects(); + } + + explicit AudioDeviceTemplate(const int32_t id) + : output_(id), + input_(id, &output_) { + } + + virtual ~AudioDeviceTemplate() { + } + + int32_t ActiveAudioLayer( + AudioDeviceModule::AudioLayer& audioLayer) const { // NOLINT + audioLayer = AudioDeviceModule::kPlatformDefaultAudio; + return 0; + } + + int32_t Init() { + return output_.Init() | input_.Init(); + } + + int32_t Terminate() { + return output_.Terminate() | input_.Terminate(); + } + + bool Initialized() const { + return output_.Initialized() && input_.Initialized(); + } + + int16_t PlayoutDevices() { + return output_.PlayoutDevices(); + } + + int16_t RecordingDevices() { + return input_.RecordingDevices(); + } + + int32_t PlayoutDeviceName( + uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + return output_.PlayoutDeviceName(index, name, guid); + } + + int32_t RecordingDeviceName( + uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + return input_.RecordingDeviceName(index, name, guid); + } + + int32_t SetPlayoutDevice(uint16_t index) { + return output_.SetPlayoutDevice(index); + } + + int32_t SetPlayoutDevice( + AudioDeviceModule::WindowsDeviceType device) { + return output_.SetPlayoutDevice(device); + } + + int32_t SetRecordingDevice(uint16_t index) { + return input_.SetRecordingDevice(index); + } + + int32_t SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType device) { + return input_.SetRecordingDevice(device); + } + + int32_t PlayoutIsAvailable( + bool& available) { // NOLINT + return output_.PlayoutIsAvailable(available); + } + + int32_t InitPlayout() { + return output_.InitPlayout(); + } + + bool PlayoutIsInitialized() const { + return output_.PlayoutIsInitialized(); + } + + int32_t RecordingIsAvailable( + bool& available) { // NOLINT + return input_.RecordingIsAvailable(available); + } + + int32_t InitRecording() { + return input_.InitRecording(); + } + + bool RecordingIsInitialized() const { + return input_.RecordingIsInitialized(); + } + + int32_t StartPlayout() { + return output_.StartPlayout(); + } + + int32_t StopPlayout() { + return output_.StopPlayout(); + } + + bool Playing() const { + return output_.Playing(); + } + + int32_t StartRecording() { + return input_.StartRecording(); + } + + int32_t StopRecording() { + return input_.StopRecording(); + } + + bool Recording() const { + return input_.Recording() ; + } + + int32_t SetAGC(bool enable) { + return input_.SetAGC(enable); + } + + bool AGC() const { + return input_.AGC(); + } + + int32_t SetWaveOutVolume(uint16_t volumeLeft, + uint16_t volumeRight) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, 0, + " API call not supported on this platform"); + return -1; + } + + int32_t WaveOutVolume( + uint16_t& volumeLeft, // NOLINT + uint16_t& volumeRight) const { // NOLINT + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, 0, + " API call not supported on this platform"); + return -1; + } + + int32_t SpeakerIsAvailable( + bool& available) { // NOLINT + return output_.SpeakerIsAvailable(available); + } + + 
int32_t InitSpeaker() { + return output_.InitSpeaker(); + } + + bool SpeakerIsInitialized() const { + return output_.SpeakerIsInitialized(); + } + + int32_t MicrophoneIsAvailable( + bool& available) { // NOLINT + return input_.MicrophoneIsAvailable(available); + } + + int32_t InitMicrophone() { + return input_.InitMicrophone(); + } + + bool MicrophoneIsInitialized() const { + return input_.MicrophoneIsInitialized(); + } + + int32_t SpeakerVolumeIsAvailable( + bool& available) { // NOLINT + return output_.SpeakerVolumeIsAvailable(available); + } + + int32_t SetSpeakerVolume(uint32_t volume) { + return output_.SetSpeakerVolume(volume); + } + + int32_t SpeakerVolume( + uint32_t& volume) const { // NOLINT + return output_.SpeakerVolume(volume); + } + + int32_t MaxSpeakerVolume( + uint32_t& maxVolume) const { // NOLINT + return output_.MaxSpeakerVolume(maxVolume); + } + + int32_t MinSpeakerVolume( + uint32_t& minVolume) const { // NOLINT + return output_.MinSpeakerVolume(minVolume); + } + + int32_t SpeakerVolumeStepSize( + uint16_t& stepSize) const { // NOLINT + return output_.SpeakerVolumeStepSize(stepSize); + } + + int32_t MicrophoneVolumeIsAvailable( + bool& available) { // NOLINT + return input_.MicrophoneVolumeIsAvailable(available); + } + + int32_t SetMicrophoneVolume(uint32_t volume) { + return input_.SetMicrophoneVolume(volume); + } + + int32_t MicrophoneVolume( + uint32_t& volume) const { // NOLINT + return input_.MicrophoneVolume(volume); + } + + int32_t MaxMicrophoneVolume( + uint32_t& maxVolume) const { // NOLINT + return input_.MaxMicrophoneVolume(maxVolume); + } + + int32_t MinMicrophoneVolume( + uint32_t& minVolume) const { // NOLINT + return input_.MinMicrophoneVolume(minVolume); + } + + int32_t MicrophoneVolumeStepSize( + uint16_t& stepSize) const { // NOLINT + return input_.MicrophoneVolumeStepSize(stepSize); + } + + int32_t SpeakerMuteIsAvailable( + bool& available) { // NOLINT + return output_.SpeakerMuteIsAvailable(available); + } + + int32_t SetSpeakerMute(bool enable) { + return output_.SetSpeakerMute(enable); + } + + int32_t SpeakerMute( + bool& enabled) const { // NOLINT + return output_.SpeakerMute(enabled); + } + + int32_t MicrophoneMuteIsAvailable( + bool& available) { // NOLINT + return input_.MicrophoneMuteIsAvailable(available); + } + + int32_t SetMicrophoneMute(bool enable) { + return input_.SetMicrophoneMute(enable); + } + + int32_t MicrophoneMute( + bool& enabled) const { // NOLINT + return input_.MicrophoneMute(enabled); + } + + int32_t MicrophoneBoostIsAvailable( + bool& available) { // NOLINT + return input_.MicrophoneBoostIsAvailable(available); + } + + int32_t SetMicrophoneBoost(bool enable) { + return input_.SetMicrophoneBoost(enable); + } + + int32_t MicrophoneBoost( + bool& enabled) const { // NOLINT + return input_.MicrophoneBoost(enabled); + } + + int32_t StereoPlayoutIsAvailable( + bool& available) { // NOLINT + return output_.StereoPlayoutIsAvailable(available); + } + + int32_t SetStereoPlayout(bool enable) { + return output_.SetStereoPlayout(enable); + } + + int32_t StereoPlayout( + bool& enabled) const { // NOLINT + return output_.StereoPlayout(enabled); + } + + int32_t StereoRecordingIsAvailable( + bool& available) { // NOLINT + return input_.StereoRecordingIsAvailable(available); + } + + int32_t SetStereoRecording(bool enable) { + return input_.SetStereoRecording(enable); + } + + int32_t StereoRecording( + bool& enabled) const { // NOLINT + return input_.StereoRecording(enabled); + } + + int32_t SetPlayoutBuffer( + const 
AudioDeviceModule::BufferType type, + uint16_t sizeMS) { + return output_.SetPlayoutBuffer(type, sizeMS); + } + + int32_t PlayoutBuffer( + AudioDeviceModule::BufferType& type, + uint16_t& sizeMS) const { // NOLINT + return output_.PlayoutBuffer(type, sizeMS); + } + + int32_t PlayoutDelay( + uint16_t& delayMS) const { // NOLINT + return output_.PlayoutDelay(delayMS); + } + + int32_t RecordingDelay( + uint16_t& delayMS) const { // NOLINT + return input_.RecordingDelay(delayMS); + } + + int32_t CPULoad( + uint16_t& load) const { // NOLINT + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, 0, + " API call not supported on this platform"); + return -1; + } + + bool PlayoutWarning() const { + return output_.PlayoutWarning(); + } + + bool PlayoutError() const { + return output_.PlayoutError(); + } + + bool RecordingWarning() const { + return input_.RecordingWarning(); + } + + bool RecordingError() const { + return input_.RecordingError(); + } + + void ClearPlayoutWarning() { + return output_.ClearPlayoutWarning(); + } + + void ClearPlayoutError() { + return output_.ClearPlayoutError(); + } + + void ClearRecordingWarning() { + return input_.ClearRecordingWarning(); + } + + void ClearRecordingError() { + return input_.ClearRecordingError(); + } + + void AttachAudioBuffer( + AudioDeviceBuffer* audioBuffer) { + output_.AttachAudioBuffer(audioBuffer); + input_.AttachAudioBuffer(audioBuffer); + } + + int32_t SetRecordingSampleRate( + const uint32_t samplesPerSec) { + return input_.SetRecordingSampleRate(samplesPerSec); + } + + int32_t SetPlayoutSampleRate( + const uint32_t samplesPerSec) { + return output_.SetPlayoutSampleRate(samplesPerSec); + } + + int32_t SetLoudspeakerStatus(bool enable) { + return output_.SetLoudspeakerStatus(enable); + } + + int32_t GetLoudspeakerStatus( + bool& enable) const { // NOLINT + return output_.GetLoudspeakerStatus(enable); + } + + private: + OutputType output_; + InputType input_; +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.cc index 6d2171f3eb5d..3f287caebcf3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.cc @@ -13,39 +13,9 @@ #include #include "AndroidJNIWrapper.h" +#include "webrtc/modules/utility/interface/helpers_android.h" #include "webrtc/system_wrappers/interface/trace.h" -namespace { - -class AttachThreadScoped { - public: - explicit AttachThreadScoped(JavaVM* jvm) - : attached_(false), jvm_(jvm), env_(NULL) { - jint ret_val = jvm->GetEnv(reinterpret_cast(&env_), - REQUIRED_JNI_VERSION); - if (ret_val == JNI_EDETACHED) { - // Attach the thread to the Java VM. 
- ret_val = jvm_->AttachCurrentThread(&env_, NULL); - attached_ = ret_val > 0; - assert(attached_); - } - } - ~AttachThreadScoped() { - if (attached_ && (jvm_->DetachCurrentThread() < 0)) { - assert(false); - } - } - - JNIEnv* env() { return env_; } - - private: - bool attached_; - JavaVM* jvm_; - JNIEnv* env_; -}; - -} // namespace - namespace webrtc { static JavaVM* g_jvm_ = NULL; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h index d8d7bda7a3cb..4b897a1e717f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h @@ -18,8 +18,6 @@ namespace webrtc { -#define REQUIRED_JNI_VERSION JNI_VERSION_1_4 - class AudioManagerJni { public: AudioManagerJni(); @@ -44,9 +42,9 @@ class AudioManagerJni { // SetAndroidAudioDeviceObjects. static void ClearAndroidAudioDeviceObjects(); - bool low_latency_supported() { return low_latency_supported_; } - int native_output_sample_rate() { return native_output_sample_rate_; } - int native_buffer_size() { return native_buffer_size_; } + bool low_latency_supported() const { return low_latency_supported_; } + int native_output_sample_rate() const { return native_output_sample_rate_; } + int native_buffer_size() const { return native_buffer_size_; } private: bool HasDeviceObjects(); diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.cc new file mode 100644 index 000000000000..7774e3483b76 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.cc @@ -0,0 +1,1188 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/* + * Android audio device implementation (JNI/AudioRecord usage) + */ + +// TODO(xians): Break out attach and detach current thread to JVM to +// separate functions. + +#include "AndroidJNIWrapper.h" +#include "webrtc/modules/audio_device/android/audio_record_jni.h" + +#include +#include + +#include "webrtc/modules/audio_device/android/audio_common.h" +#include "webrtc/modules/audio_device/audio_device_config.h" +#include "webrtc/modules/audio_device/audio_device_utility.h" + +#include "webrtc/system_wrappers/interface/event_wrapper.h" +#include "webrtc/system_wrappers/interface/thread_wrapper.h" +#include "webrtc/system_wrappers/interface/trace.h" + +namespace webrtc { + +JavaVM* AudioRecordJni::globalJvm = NULL; +JNIEnv* AudioRecordJni::globalJNIEnv = NULL; +jobject AudioRecordJni::globalContext = NULL; +jclass AudioRecordJni::globalScClass = NULL; + +int32_t AudioRecordJni::SetAndroidAudioDeviceObjects(void* javaVM, void* env, + void* context) { + assert(env); + globalJvm = reinterpret_cast(javaVM); + globalJNIEnv = reinterpret_cast(env); + // Get java class type (note path to class packet). 
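Stepping outside the listing for a moment: the AttachThreadScoped helper deleted above is superseded by the shared version in helpers_android.h, and the attach/detach idiom it implements recurs throughout this module. Note that AttachCurrentThread returns JNI_OK (which is 0) on success, so the deleted code's "attached_ = ret_val > 0" never marked a successful attach and its destructor never detached. A self-contained sketch using only standard JNI calls:

    // RAII attach-to-JVM sketch; assumes nothing beyond <jni.h>.
    class AttachThreadScopedSketch {
     public:
      explicit AttachThreadScopedSketch(JavaVM* jvm)
          : attached_(false), jvm_(jvm), env_(NULL) {
        jint ret = jvm_->GetEnv(reinterpret_cast<void**>(&env_),
                                JNI_VERSION_1_4);
        if (ret == JNI_EDETACHED) {
          // Only a thread we attach here is ours to detach later.
          attached_ = (jvm_->AttachCurrentThread(&env_, NULL) == JNI_OK);
        }
      }
      ~AttachThreadScopedSketch() {
        if (attached_)
          jvm_->DetachCurrentThread();
      }
      JNIEnv* env() { return env_; }
     private:
      bool attached_;
      JavaVM* jvm_;
      JNIEnv* env_;
    };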
+ if (!globalScClass) { + globalScClass = jsjni_GetGlobalClassRef( + "org/webrtc/voiceengine/WebRtcAudioRecord"); + if (!globalScClass) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, + "%s: could not find java class", __FUNCTION__); + return -1; // exception thrown + } + } + + if (!globalContext) { + globalContext = jsjni_GetGlobalContextRef(); + if (!globalContext) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, + "%s: could not create context reference", __FUNCTION__); + return -1; + } + } + + return 0; +} + +void AudioRecordJni::ClearAndroidAudioDeviceObjects() { + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, + "%s: env is NULL, assuming deinit", __FUNCTION__); + + globalJvm = NULL; + if (!globalJNIEnv) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1, + "%s: saved env already NULL", __FUNCTION__); + return; + } + + globalContext = reinterpret_cast(NULL); + + globalJNIEnv->DeleteGlobalRef(globalScClass); + globalScClass = reinterpret_cast(NULL); + + globalJNIEnv = reinterpret_cast(NULL); +} + +AudioRecordJni::AudioRecordJni( + const int32_t id, PlayoutDelayProvider* delay_provider) + : _javaVM(NULL), + _jniEnvRec(NULL), + _javaScClass(0), + _javaScObj(0), + _javaRecBuffer(0), + _javaDirectRecBuffer(NULL), + _javaMidRecAudio(0), + _ptrAudioBuffer(NULL), + _critSect(*CriticalSectionWrapper::CreateCriticalSection()), + _id(id), + _delay_provider(delay_provider), + _initialized(false), + _timeEventRec(*EventWrapper::Create()), + _recStartStopEvent(*EventWrapper::Create()), + _ptrThreadRec(NULL), + _recThreadID(0), + _recThreadIsInitialized(false), + _shutdownRecThread(false), + _recordingDeviceIsSpecified(false), + _recording(false), + _recIsInitialized(false), + _micIsInitialized(false), + _startRec(false), + _recWarning(0), + _recError(0), + _delayRecording(0), + _AGC(false), + _samplingFreqIn((N_REC_SAMPLES_PER_SEC)), + _recAudioSource(1) { // 1 is AudioSource.MIC which is our default + memset(_recBuffer, 0, sizeof(_recBuffer)); +} + +AudioRecordJni::~AudioRecordJni() { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, + "%s destroyed", __FUNCTION__); + + Terminate(); + + delete &_recStartStopEvent; + delete &_timeEventRec; + delete &_critSect; +} + +int32_t AudioRecordJni::Init() { + CriticalSectionScoped lock(&_critSect); + + if (_initialized) + { + return 0; + } + + _recWarning = 0; + _recError = 0; + + // Init Java member variables + // and set up JNI interface to + // AudioDeviceAndroid java class + if (InitJavaResources() != 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Failed to init Java resources", __FUNCTION__); + return -1; + } + + // Check the sample rate to be used for playback and recording + // and the max playout volume + if (InitSampleRate() != 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Failed to init samplerate", __FUNCTION__); + return -1; + } + + const char* threadName = "jni_audio_capture_thread"; + _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this, + kRealtimePriority, threadName); + if (_ptrThreadRec == NULL) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " failed to create the rec audio thread"); + return -1; + } + + unsigned int threadID(0); + if (!_ptrThreadRec->Start(threadID)) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " failed to start the rec audio thread"); + delete _ptrThreadRec; + _ptrThreadRec = NULL; + return -1; + } + _recThreadID = threadID; + _initialized = true; + + return 0; +} + +int32_t AudioRecordJni::Terminate() { + 
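+  // Shutdown sequence (summary): stop recording, flag the capture thread
+  // to exit, wake it via _timeEventRec, wait up to 5 seconds on
+  // _recStartStopEvent for the thread to detach itself from the JVM, and
+  // finally release the global JNI references created in
+  // InitJavaResources().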
CriticalSectionScoped lock(&_critSect); + + if (!_initialized) + { + return 0; + } + + StopRecording(); + _shutdownRecThread = true; + _timeEventRec.Set(); // Release rec thread from waiting state + if (_ptrThreadRec) + { + // First, the thread must detach itself from Java VM + _critSect.Leave(); + if (kEventSignaled != _recStartStopEvent.Wait(5000)) + { + WEBRTC_TRACE( + kTraceError, + kTraceAudioDevice, + _id, + "%s: Recording thread shutdown timed out, cannot " + "terminate thread", + __FUNCTION__); + // If we close thread anyway, the app will crash + return -1; + } + _recStartStopEvent.Reset(); + _critSect.Enter(); + + // Close down rec thread + ThreadWrapper* tmpThread = _ptrThreadRec; + _ptrThreadRec = NULL; + _critSect.Leave(); + tmpThread->SetNotAlive(); + // Release again, we might have returned to waiting state + _timeEventRec.Set(); + if (tmpThread->Stop()) + { + delete tmpThread; + _jniEnvRec = NULL; + } + else + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " failed to close down the rec audio thread"); + } + _critSect.Enter(); + + _recThreadIsInitialized = false; + } + _micIsInitialized = false; + _recordingDeviceIsSpecified = false; + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + // Make method IDs and buffer pointers unusable + _javaMidRecAudio = 0; + _javaDirectRecBuffer = NULL; + + // Delete the references to the java buffers, this allows the + // garbage collector to delete them + env->DeleteGlobalRef(_javaRecBuffer); + _javaRecBuffer = 0; + + // Delete the references to the java object and class, this allows the + // garbage collector to delete them + env->DeleteGlobalRef(_javaScObj); + _javaScObj = 0; + _javaScClass = 0; + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + _initialized = false; + + return 0; +} + +int32_t AudioRecordJni::RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + if (0 != index) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Device index is out of range [0,0]"); + return -1; + } + + // Return empty string + memset(name, 0, kAdmMaxDeviceNameSize); + + if (guid) + { + memset(guid, 0, kAdmMaxGuidSize); + } + + return 0; +} + +int32_t AudioRecordJni::SetRecordingDevice(uint16_t index) { + if (_recIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording already initialized"); + return -1; + } + + // Recording device index will be used for specifying recording + // audio source, allow any value + _recAudioSource = index; + _recordingDeviceIsSpecified = true; + + return 0; +} + +int32_t AudioRecordJni::SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType device) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::RecordingIsAvailable(bool& available) { // NOLINT + available = false; + + // Try to 
initialize the playout side + int32_t res = InitRecording(); + + // Cancel effect of initialization + StopRecording(); + + if (res != -1) + { + available = true; + } + + return res; +} + +int32_t AudioRecordJni::InitRecording() { + CriticalSectionScoped lock(&_critSect); + + if (!_initialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Not initialized"); + return -1; + } + + if (_recording) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Recording already started"); + return -1; + } + + if (!_recordingDeviceIsSpecified) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording device is not specified"); + return -1; + } + + if (_recIsInitialized) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording already initialized"); + return 0; + } + + // Initialize the microphone + if (InitMicrophone() == -1) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " InitMicrophone() failed"); + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording", + "(II)I"); + + int retVal = -1; + + // call java sc object method + jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource, + _samplingFreqIn); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "InitRecording failed (%d)", res); + } + else + { + // Set the audio device buffer sampling rate + _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn); + + // the init rec function returns a fixed delay + _delayRecording = res / _samplingFreqIn; + + _recIsInitialized = true; + retVal = 0; + } + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return retVal; +} + +int32_t AudioRecordJni::StartRecording() { + CriticalSectionScoped lock(&_critSect); + + if (!_recIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording not initialized"); + return -1; + } + + if (_recording) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording already started"); + return 0; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID startRecordingID = env->GetMethodID(_javaScClass, + "StartRecording", "()I"); + + // Call java sc object method + jint res = env->CallIntMethod(_javaScObj, startRecordingID); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "StartRecording failed (%d)", res); + return -1; + } + + _recWarning = 0; + 
_recError = 0; + + // Signal to recording thread that we want to start + _startRec = true; + _timeEventRec.Set(); // Release thread from waiting state + _critSect.Leave(); + // Wait for thread to init + if (kEventSignaled != _recStartStopEvent.Wait(5000)) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Timeout or error starting"); + } + _recStartStopEvent.Reset(); + _critSect.Enter(); + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return 0; + +} + +int32_t AudioRecordJni::StopRecording() { + CriticalSectionScoped lock(&_critSect); + + if (!_recIsInitialized) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording is not initialized"); + return 0; + } + + // make sure we don't start recording (it's asynchronous), + // assuming that we are under lock + _startRec = false; + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording", + "()I"); + + // Call java sc object method + jint res = env->CallIntMethod(_javaScObj, stopRecordingID); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "StopRecording failed (%d)", res); + } + + _recIsInitialized = false; + _recording = false; + _recWarning = 0; + _recError = 0; + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return 0; + +} + +int32_t AudioRecordJni::SetAGC(bool enable) { + _AGC = enable; + return 0; +} + +int32_t AudioRecordJni::MicrophoneIsAvailable(bool& available) { // NOLINT + // We always assume it's available + available = true; + return 0; +} + +int32_t AudioRecordJni::InitMicrophone() { + CriticalSectionScoped lock(&_critSect); + + if (_recording) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Recording already started"); + return -1; + } + + if (!_recordingDeviceIsSpecified) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording device is not specified"); + return -1; + } + + // Nothing needs to be done here, we use a flag to have consistent + // behavior with other platforms + _micIsInitialized = true; + + return 0; +} + +int32_t AudioRecordJni::MicrophoneVolumeIsAvailable( + bool& available) { // NOLINT + available = false; // Mic volume not supported on Android + return 0; +} + +int32_t AudioRecordJni::SetMicrophoneVolume( uint32_t /*volume*/) { + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MicrophoneVolume(uint32_t& volume) const { // NOLINT + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MaxMicrophoneVolume( + uint32_t& maxVolume) const { // NOLINT + WEBRTC_TRACE(kTraceWarning, 
kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MinMicrophoneVolume( + uint32_t& minVolume) const { // NOLINT + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MicrophoneVolumeStepSize( + uint16_t& stepSize) const { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MicrophoneMuteIsAvailable(bool& available) { // NOLINT + available = false; // Mic mute not supported on Android + return 0; +} + +int32_t AudioRecordJni::SetMicrophoneMute(bool enable) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MicrophoneMute(bool& enabled) const { // NOLINT + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioRecordJni::MicrophoneBoostIsAvailable(bool& available) { // NOLINT + available = false; // Mic boost not supported on Android + return 0; +} + +int32_t AudioRecordJni::SetMicrophoneBoost(bool enable) { + if (!_micIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Microphone not initialized"); + return -1; + } + + if (enable) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Enabling not available"); + return -1; + } + + return 0; +} + +int32_t AudioRecordJni::MicrophoneBoost(bool& enabled) const { // NOLINT + if (!_micIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Microphone not initialized"); + return -1; + } + + enabled = false; + + return 0; +} + +int32_t AudioRecordJni::StereoRecordingIsAvailable(bool& available) { // NOLINT + available = false; // Stereo recording not supported on Android + return 0; +} + +int32_t AudioRecordJni::SetStereoRecording(bool enable) { + if (enable) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Enabling not available"); + return -1; + } + + return 0; +} + +int32_t AudioRecordJni::StereoRecording(bool& enabled) const { // NOLINT + enabled = false; + return 0; +} + +int32_t AudioRecordJni::RecordingDelay(uint16_t& delayMS) const { // NOLINT + delayMS = _delayRecording; + return 0; +} + +bool AudioRecordJni::RecordingWarning() const { + return (_recWarning > 0); +} + +bool AudioRecordJni::RecordingError() const { + return (_recError > 0); +} + +void AudioRecordJni::ClearRecordingWarning() { + _recWarning = 0; +} + +void AudioRecordJni::ClearRecordingError() { + _recError = 0; +} + +void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { + CriticalSectionScoped lock(&_critSect); + _ptrAudioBuffer = audioBuffer; + // inform the AudioBuffer about default settings for this implementation + _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC); + _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS); +} + +int32_t AudioRecordJni::SetRecordingSampleRate(const uint32_t samplesPerSec) { + if (samplesPerSec > 48000 || samplesPerSec < 8000) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Invalid sample rate"); + return -1; + } + + // set the recording sample rate to use + _samplingFreqIn = samplesPerSec; + + // Update the AudioDeviceBuffer + _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec); + + return 0; +} + +int32_t AudioRecordJni::InitJavaResources() { + // todo: Check if we already have created the java object + _javaVM = globalJvm; + 
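+  // (Overview of InitJavaResources(): bind to the cached WebRtcAudioRecord
+  //  class, construct the Java object, pin global references to it and to
+  //  its direct record buffer, and resolve the RecordAudio method ID used
+  //  by the capture thread.)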
_javaScClass = globalScClass; + + // use the jvm that has been set + if (!_javaVM) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Not a valid Java VM pointer", __FUNCTION__); + return -1; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "get method id"); + + // get the method ID for the void(void) constructor + jmethodID cid = env->GetMethodID(_javaScClass, "", "()V"); + if (cid == NULL) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get constructor ID", __FUNCTION__); + return -1; /* exception thrown */ + } + + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "construct object", __FUNCTION__); + + // construct the object + jobject javaScObjLocal = env->NewObject(_javaScClass, cid); + if (!javaScObjLocal) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: could not create Java sc object", __FUNCTION__); + return -1; + } + + // Create a reference to the object (to tell JNI that we are referencing it + // after this function has returned). + _javaScObj = env->NewGlobalRef(javaScObjLocal); + if (!_javaScObj) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not create Java sc object reference", + __FUNCTION__); + return -1; + } + + // Delete local object ref, we only use the global ref. + env->DeleteLocalRef(javaScObjLocal); + + ////////////////////// + // AUDIO MANAGEMENT + + // This is not mandatory functionality + if (globalContext) { + jfieldID context_id = env->GetFieldID(globalScClass, + "_context", + "Landroid/content/Context;"); + if (!context_id) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get _context id", __FUNCTION__); + return -1; + } + + env->SetObjectField(_javaScObj, context_id, globalContext); + jobject javaContext = env->GetObjectField(_javaScObj, context_id); + if (!javaContext) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not set or get _context", __FUNCTION__); + return -1; + } + } + else { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: did not set Context - some functionality is not " + "supported", + __FUNCTION__); + } + + // Get rec buffer field ID. + jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer", + "Ljava/nio/ByteBuffer;"); + if (!fidRecBuffer) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get rec buffer fid", __FUNCTION__); + return -1; + } + + // Get rec buffer object. + jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer); + if (!javaRecBufferLocal) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get rec buffer", __FUNCTION__); + return -1; + } + + // Create a global reference to the object (to tell JNI that we are + // referencing it after this function has returned) + // NOTE: we are referencing it only through the direct buffer (see below). 
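+  // (Direct-buffer note: assuming the Java side allocates _recBuffer with
+  //  ByteBuffer.allocateDirect, the GetDirectBufferAddress() call below
+  //  exposes that storage directly to native code, so RecordAudio() output
+  //  reaches C++ without a copy; the global ref taken just below is what
+  //  keeps that storage alive between calls.)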
+ _javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal); + if (!_javaRecBuffer) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get rec buffer reference", __FUNCTION__); + return -1; + } + + // Delete local object ref, we only use the global ref. + env->DeleteLocalRef(javaRecBufferLocal); + + // Get direct buffer. + _javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer); + if (!_javaDirectRecBuffer) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get direct rec buffer", __FUNCTION__); + return -1; + } + + // Get the rec audio method ID. + _javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I"); + if (!_javaMidRecAudio) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get rec audio mid", __FUNCTION__); + return -1; + } + + // Detach this thread if it was attached. + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + return 0; + +} + +int32_t AudioRecordJni::InitSampleRate() { + int samplingFreq = 44100; + jint res = 0; + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + if (_samplingFreqIn > 0) + { + // read the configured sampling rate + samplingFreq = _samplingFreqIn; + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, + " Trying configured recording sampling rate %d", + samplingFreq); + } + + // get the method ID + jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording", + "(II)I"); + + bool keepTrying = true; + while (keepTrying) + { + // call java sc object method + res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource, + samplingFreq); + if (res < 0) + { + switch (samplingFreq) + { + case 44100: + samplingFreq = 16000; + break; + case 16000: + samplingFreq = 8000; + break; + default: // error + WEBRTC_TRACE(kTraceError, + kTraceAudioDevice, _id, + "%s: InitRecording failed (%d)", __FUNCTION__, + res); + return -1; + } + } + else + { + keepTrying = false; + } + } + + // set the recording sample rate to use + _samplingFreqIn = samplingFreq; + + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, + "Recording sample rate set to (%d)", _samplingFreqIn); + + // get the method ID + jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording", + "()I"); + + // Call java sc object method + res = env->CallIntMethod(_javaScObj, stopRecordingID); + if (res < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "StopRecording failed (%d)", res); + } + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + return 0; +} + +bool AudioRecordJni::RecThreadFunc(void* pThis) +{ + return (static_cast (pThis)->RecThreadProcess()); +} + +bool AudioRecordJni::RecThreadProcess() +{ + if (!_recThreadIsInitialized) + { + // Do once when thread is started + + // Attach 
this thread to JVM + jint res = _javaVM->AttachCurrentThread(&_jniEnvRec, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !_jniEnvRec) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, + _id, "Could not attach rec thread to JVM (%d, %p)", + res, _jniEnvRec); + return false; // Close down thread + } + + _recThreadIsInitialized = true; + } + + // just sleep if rec has not started + if (!_recording) + { + switch (_timeEventRec.Wait(1000)) + { + case kEventSignaled: + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "Recording thread event signal"); + _timeEventRec.Reset(); + break; + case kEventError: + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, + _id, "Recording thread event error"); + return true; + case kEventTimeout: + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "Recording thread event timeout"); + return true; + } + } + + Lock(); + + if (_startRec) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "_startRec true, performing initial actions"); + _startRec = false; + _recording = true; + _recWarning = 0; + _recError = 0; + _recStartStopEvent.Set(); + } + + if (_recording) + { + uint32_t samplesToRec = _samplingFreqIn / 100; + + // Call java sc object method to record data to direct buffer + // Will block until data has been recorded (see java sc class), + // therefore we must release the lock + UnLock(); + jint recDelayInSamples = _jniEnvRec->CallIntMethod(_javaScObj, + _javaMidRecAudio, + 2 * samplesToRec); + if (recDelayInSamples < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "RecordAudio failed"); + _recWarning = 1; + } + else + { + _delayRecording = (recDelayInSamples * 1000) / _samplingFreqIn; + } + Lock(); + + // Check again since recording may have stopped during Java call + if (_recording) + { + // WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + // "total delay is %d", msPlayDelay + _delayRecording); + + // Copy data to our direct buffer (held by java sc object) + // todo: Give _javaDirectRecBuffer directly to VoE? + // todo: Check count <= 480 ? + memcpy(_recBuffer, _javaDirectRecBuffer, 2 * samplesToRec); + + // store the recorded buffer (no action will be taken if the + // #recorded samples is not a full buffer) + _ptrAudioBuffer->SetRecordedBuffer(_recBuffer, samplesToRec); + + // store vqe delay values + _ptrAudioBuffer->SetVQEData(_delay_provider->PlayoutDelayMs(), + _delayRecording, 0); + + // deliver recorded samples at specified sample rate, mic level + // etc. to the observer using callback + UnLock(); + _ptrAudioBuffer->DeliverRecordedData(); + Lock(); + } + + } // _recording + + if (_shutdownRecThread) + { + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "Detaching rec thread from Java VM"); + + // Detach thread from Java VM + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, + _id, "Could not detach recording thread from JVM"); + _shutdownRecThread = false; + // If we say OK (i.e. 
set event) and close thread anyway, + // app will crash + } + else + { + _jniEnvRec = NULL; + _shutdownRecThread = false; + _recStartStopEvent.Set(); // Signal to Terminate() that we are done + + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "Sent signal rec"); + } + } + + UnLock(); + return true; +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.h new file mode 100644 index 000000000000..4be96ac8d326 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.h @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_ +#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_ + +#include <jni.h> + +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/modules/audio_device/include/audio_device_defines.h" +#include "webrtc/modules/audio_device/audio_device_generic.h" + +namespace webrtc { + +class EventWrapper; +class ThreadWrapper; +class PlayoutDelayProvider; + +class AudioRecordJni { + public: + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, void* env, + void* context); + + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* context); + + static void ClearAndroidAudioDeviceObjects(); + + AudioRecordJni(const int32_t id, PlayoutDelayProvider* delay_provider); + ~AudioRecordJni(); + + // Main initialization and termination + int32_t Init(); + int32_t Terminate(); + bool Initialized() const { return _initialized; } + + // Device enumeration + int16_t RecordingDevices() { return 1; } // There is one device only + int32_t RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]); + + // Device selection + int32_t SetRecordingDevice(uint16_t index); + int32_t SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType device); + + // Audio transport initialization + int32_t RecordingIsAvailable(bool& available); // NOLINT + int32_t InitRecording(); + bool RecordingIsInitialized() const { return _recIsInitialized; } + + // Audio transport control + int32_t StartRecording(); + int32_t StopRecording(); + bool Recording() const { return _recording; } + + // Microphone Automatic Gain Control (AGC) + int32_t SetAGC(bool enable); + bool AGC() const { return _AGC; } + + // Audio mixer initialization + int32_t MicrophoneIsAvailable(bool& available); // NOLINT + int32_t InitMicrophone(); + bool MicrophoneIsInitialized() const { return _micIsInitialized; } + + // Microphone volume controls + int32_t MicrophoneVolumeIsAvailable(bool& available); // NOLINT + // TODO(leozwang): Add microphone volume control when OpenSL APIs + // are available.
+ int32_t SetMicrophoneVolume(uint32_t volume); + int32_t MicrophoneVolume(uint32_t& volume) const; // NOLINT + int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const; // NOLINT + int32_t MinMicrophoneVolume(uint32_t& minVolume) const; // NOLINT + int32_t MicrophoneVolumeStepSize( + uint16_t& stepSize) const; // NOLINT + + // Microphone mute control + int32_t MicrophoneMuteIsAvailable(bool& available); // NOLINT + int32_t SetMicrophoneMute(bool enable); + int32_t MicrophoneMute(bool& enabled) const; // NOLINT + + // Microphone boost control + int32_t MicrophoneBoostIsAvailable(bool& available); // NOLINT + int32_t SetMicrophoneBoost(bool enable); + int32_t MicrophoneBoost(bool& enabled) const; // NOLINT + + // Stereo support + int32_t StereoRecordingIsAvailable(bool& available); // NOLINT + int32_t SetStereoRecording(bool enable); + int32_t StereoRecording(bool& enabled) const; // NOLINT + + // Delay information and control + int32_t RecordingDelay(uint16_t& delayMS) const; // NOLINT + + bool RecordingWarning() const; + bool RecordingError() const; + void ClearRecordingWarning(); + void ClearRecordingError(); + + // Attach audio buffer + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + + int32_t SetRecordingSampleRate(const uint32_t samplesPerSec); + + static const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz + static const uint32_t N_REC_CHANNELS = 1; // default is mono recording + static const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz + + private: + void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) { + _critSect.Enter(); + } + void UnLock() UNLOCK_FUNCTION(_critSect) { + _critSect.Leave(); + } + + int32_t InitJavaResources(); + int32_t InitSampleRate(); + + static bool RecThreadFunc(void*); + bool RecThreadProcess(); + + // TODO(leozwang): Android holds only one JVM, all these jni handling + // will be consolidated into a single place to make it consistent and + // reliable. Chromium has a good example at base/android.
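Nearly every method in audio_record_jni.cc repeats the same four-step JNI dance: GetEnv, AttachCurrentThread if that fails, do the work, then DetachCurrentThread if the thread was attached. A minimal RAII sketch of that pattern (illustrative only; AttachedEnv is a hypothetical helper, not part of this patch, and it assumes JNI_VERSION_1_4 just as the code above does):

    #include <jni.h>

    // Attaches the calling thread to the VM when needed and detaches it on
    // scope exit, mirroring the manual pattern used throughout this patch.
    class AttachedEnv {
     public:
      explicit AttachedEnv(JavaVM* vm) : vm_(vm), env_(NULL), attached_(false) {
        if (vm_->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4) != JNI_OK) {
          attached_ = (vm_->AttachCurrentThread(&env_, NULL) == 0);
          if (!attached_) env_ = NULL;  // Attach failed; caller must check env().
        }
      }
      ~AttachedEnv() { if (attached_) vm_->DetachCurrentThread(); }
      JNIEnv* env() const { return env_; }
     private:
      JavaVM* vm_;
      JNIEnv* env_;
      bool attached_;
    };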
+ static JavaVM* globalJvm; + static JNIEnv* globalJNIEnv; + static jobject globalContext; + static jclass globalScClass; + + JavaVM* _javaVM; // denotes a Java VM + JNIEnv* _jniEnvRec; // The JNI env for recording thread + jclass _javaScClass; // AudioDeviceAndroid class + jobject _javaScObj; // AudioDeviceAndroid object + jobject _javaRecBuffer; + void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer + jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid + + AudioDeviceBuffer* _ptrAudioBuffer; + CriticalSectionWrapper& _critSect; + int32_t _id; + PlayoutDelayProvider* _delay_provider; + bool _initialized; + + EventWrapper& _timeEventRec; + EventWrapper& _recStartStopEvent; + ThreadWrapper* _ptrThreadRec; + uint32_t _recThreadID; + bool _recThreadIsInitialized; + bool _shutdownRecThread; + + int8_t _recBuffer[2 * REC_BUF_SIZE_IN_SAMPLES]; + bool _recordingDeviceIsSpecified; + + bool _recording; + bool _recIsInitialized; + bool _micIsInitialized; + + bool _startRec; + + uint16_t _recWarning; + uint16_t _recError; + + uint16_t _delayRecording; + + bool _AGC; + + uint16_t _samplingFreqIn; // Sampling frequency for Mic + int _recAudioSource; + +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.cc new file mode 100644 index 000000000000..47b4d82ec39c --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.cc @@ -0,0 +1,1373 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/* + * Android audio device implementation (JNI/AudioTrack usage) + */ + +// TODO(xians): Break out attach and detach current thread to JVM to +// separate functions. + +#include "webrtc/modules/audio_device/android/audio_track_jni.h" + +#include +#include + +#include "webrtc/modules/audio_device/audio_device_config.h" +#include "webrtc/modules/audio_device/audio_device_utility.h" + +#include "webrtc/system_wrappers/interface/event_wrapper.h" +#include "webrtc/system_wrappers/interface/thread_wrapper.h" +#include "webrtc/system_wrappers/interface/trace.h" + +#include "AndroidJNIWrapper.h" + +namespace webrtc { + +JavaVM* AudioTrackJni::globalJvm = NULL; +JNIEnv* AudioTrackJni::globalJNIEnv = NULL; +jobject AudioTrackJni::globalContext = NULL; +jclass AudioTrackJni::globalScClass = NULL; + +int32_t AudioTrackJni::SetAndroidAudioDeviceObjects(void* javaVM, void* env, + void* context) { + assert(env); + globalJvm = reinterpret_cast<JavaVM*>(javaVM); + globalJNIEnv = reinterpret_cast<JNIEnv*>(env); + + // Check if we already got a reference + if (!globalScClass) { + // Get java class type (note path to class packet).
+ globalScClass = jsjni_GetGlobalClassRef("org/webrtc/voiceengine/WebRtcAudioTrack"); + if (!globalScClass) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, + "%s: could not find java class", __FUNCTION__); + return -1; // exception thrown + } + } + if (!globalContext) { + globalContext = jsjni_GetGlobalContextRef(); + if (!globalContext) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, + "%s: could not create context reference", __FUNCTION__); + return -1; + } + } + + return 0; +} + +void AudioTrackJni::ClearAndroidAudioDeviceObjects() { + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, + "%s: env is NULL, assuming deinit", __FUNCTION__); + + globalJvm = NULL; + if (!globalJNIEnv) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1, + "%s: saved env already NULL", __FUNCTION__); + return; + } + + // No need to delete the shared global context ref. + // globalJNIEnv->DeleteGlobalRef(globalContext); + globalContext = reinterpret_cast<jobject>(NULL); + + globalJNIEnv->DeleteGlobalRef(globalScClass); + globalScClass = reinterpret_cast<jclass>(NULL); + + globalJNIEnv = reinterpret_cast<JNIEnv*>(NULL); +} + +AudioTrackJni::AudioTrackJni(const int32_t id) + : _javaVM(NULL), + _jniEnvPlay(NULL), + _javaScClass(0), + _javaScObj(0), + _javaPlayBuffer(0), + _javaDirectPlayBuffer(NULL), + _javaMidPlayAudio(0), + _ptrAudioBuffer(NULL), + _critSect(*CriticalSectionWrapper::CreateCriticalSection()), + _id(id), + _initialized(false), + _timeEventPlay(*EventWrapper::Create()), + _playStartStopEvent(*EventWrapper::Create()), + _ptrThreadPlay(NULL), + _playThreadID(0), + _playThreadIsInitialized(false), + _shutdownPlayThread(false), + _playoutDeviceIsSpecified(false), + _playing(false), + _playIsInitialized(false), + _speakerIsInitialized(false), + _startPlay(false), + _playWarning(0), + _playError(0), + _delayPlayout(0), + _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC)), + _maxSpeakerVolume(0) { +} + +AudioTrackJni::~AudioTrackJni() { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, + "%s destroyed", __FUNCTION__); + + Terminate(); + + delete &_playStartStopEvent; + delete &_timeEventPlay; + delete &_critSect; +} + +int32_t AudioTrackJni::Init() { + CriticalSectionScoped lock(&_critSect); + if (_initialized) + { + return 0; + } + + _playWarning = 0; + _playError = 0; + + // Init Java member variables + // and set up JNI interface to + // AudioDeviceAndroid java class + if (InitJavaResources() != 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Failed to init Java resources", __FUNCTION__); + return -1; + } + + // Check the sample rate to be used for playback and recording + // and the max playout volume + if (InitSampleRate() != 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Failed to init samplerate", __FUNCTION__); + return -1; + } + + const char* threadName = "jni_audio_render_thread"; + _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this, + kRealtimePriority, threadName); + if (_ptrThreadPlay == NULL) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " failed to create the play audio thread"); + return -1; + } + + unsigned int threadID = 0; + if (!_ptrThreadPlay->Start(threadID)) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " failed to start the play audio thread"); + delete _ptrThreadPlay; + _ptrThreadPlay = NULL; + return -1; + } + _playThreadID = threadID; + + _initialized = true; + + return 0; +} + +int32_t AudioTrackJni::Terminate() { + CriticalSectionScoped lock(&_critSect); + if (!_initialized) + { + return 0; + } + +
StopPlayout(); + _shutdownPlayThread = true; + _timeEventPlay.Set(); // Release play thread from waiting state + if (_ptrThreadPlay) + { + // First, the thread must detach itself from Java VM + _critSect.Leave(); + if (kEventSignaled != _playStartStopEvent.Wait(5000)) + { + WEBRTC_TRACE( + kTraceError, + kTraceAudioDevice, + _id, + "%s: Playout thread shutdown timed out, cannot " + "terminate thread", + __FUNCTION__); + // If we close thread anyway, the app will crash + return -1; + } + _playStartStopEvent.Reset(); + _critSect.Enter(); + + // Close down play thread + ThreadWrapper* tmpThread = _ptrThreadPlay; + _ptrThreadPlay = NULL; + _critSect.Leave(); + tmpThread->SetNotAlive(); + _timeEventPlay.Set(); + if (tmpThread->Stop()) + { + delete tmpThread; + _jniEnvPlay = NULL; + } + else + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " failed to close down the play audio thread"); + } + _critSect.Enter(); + + _playThreadIsInitialized = false; + } + _speakerIsInitialized = false; + _playoutDeviceIsSpecified = false; + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + // Make method IDs and buffer pointers unusable + _javaMidPlayAudio = 0; + _javaDirectPlayBuffer = NULL; + + // Delete the references to the java buffers, this allows the + // garbage collector to delete them + env->DeleteGlobalRef(_javaPlayBuffer); + _javaPlayBuffer = 0; + + // Delete the references to the java object and class, this allows the + // garbage collector to delete them + env->DeleteGlobalRef(_javaScObj); + _javaScObj = 0; + _javaScClass = 0; + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + _initialized = false; + + return 0; +} + +int32_t AudioTrackJni::PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + if (0 != index) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Device index is out of range [0,0]"); + return -1; + } + + // Return empty string + memset(name, 0, kAdmMaxDeviceNameSize); + + if (guid) + { + memset(guid, 0, kAdmMaxGuidSize); + } + + return 0; +} + +int32_t AudioTrackJni::SetPlayoutDevice(uint16_t index) { + if (_playIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout already initialized"); + return -1; + } + + if (0 != index) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Device index is out of range [0,0]"); + return -1; + } + + // Do nothing but set a flag, this is to have consistent behavior + // with other platforms + _playoutDeviceIsSpecified = true; + + return 0; +} + +int32_t AudioTrackJni::SetPlayoutDevice( + AudioDeviceModule::WindowsDeviceType device) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + + +int32_t AudioTrackJni::PlayoutIsAvailable(bool& available) { // NOLINT + available = false; + + // Try to initialize the playout side + int32_t res =
InitPlayout(); + + // Cancel effect of initialization + StopPlayout(); + + if (res != -1) + { + available = true; + } + + return res; +} + +int32_t AudioTrackJni::InitPlayout() { + CriticalSectionScoped lock(&_critSect); + + if (!_initialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Not initialized"); + return -1; + } + + if (_playing) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Playout already started"); + return -1; + } + + if (!_playoutDeviceIsSpecified) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout device is not specified"); + return -1; + } + + if (_playIsInitialized) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout already initialized"); + return 0; + } + + // Initialize the speaker + if (InitSpeaker() == -1) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " InitSpeaker() failed"); + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "attaching"); + + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback", + "(I)I"); + + int retVal = -1; + + // Call java sc object method + jint res = env->CallIntMethod(_javaScObj, initPlaybackID, _samplingFreqOut); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "InitPlayback failed (%d)", res); + } + else + { + // Set the audio device buffer sampling rate + _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut); + _playIsInitialized = true; + retVal = 0; + } + + // Detach this thread if it was attached + if (isAttached) + { + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "detaching"); + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return retVal; +} + +int32_t AudioTrackJni::StartPlayout() { + CriticalSectionScoped lock(&_critSect); + + if (!_playIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout not initialized"); + return -1; + } + + if (_playing) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout already started"); + return 0; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID startPlaybackID = env->GetMethodID(_javaScClass, "StartPlayback", + "()I"); + + // Call java sc object method + jint res = env->CallIntMethod(_javaScObj, startPlaybackID); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "StartPlayback failed (%d)", res); + return -1; + } + + _playWarning = 0; + _playError = 0; + + // Signal to playout thread that we want to 
start + _startPlay = true; + _timeEventPlay.Set(); // Release thread from waiting state + _critSect.Leave(); + // Wait for thread to init + if (kEventSignaled != _playStartStopEvent.Wait(5000)) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Timeout or error starting"); + } + _playStartStopEvent.Reset(); + _critSect.Enter(); + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return 0; +} + +int32_t AudioTrackJni::StopPlayout() { + CriticalSectionScoped lock(&_critSect); + + if (!_playIsInitialized) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout is not initialized"); + return 0; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback", + "()I"); + + // Call java sc object method + jint res = env->CallIntMethod(_javaScObj, stopPlaybackID); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "StopPlayback failed (%d)", res); + } + + _playIsInitialized = false; + _playing = false; + _playWarning = 0; + _playError = 0; + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return 0; + +} + +int32_t AudioTrackJni::SpeakerIsAvailable(bool& available) { // NOLINT + // We always assume it's available + available = true; + return 0; +} + +int32_t AudioTrackJni::InitSpeaker() { + CriticalSectionScoped lock(&_critSect); + + if (_playing) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Playout already started"); + return -1; + } + + if (!_playoutDeviceIsSpecified) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout device is not specified"); + return -1; + } + + // Nothing needs to be done here, we use a flag to have consistent + // behavior with other platforms + _speakerIsInitialized = true; + + return 0; +} + +int32_t AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) { // NOLINT + available = true; // We assume we are always able to set/get volume + return 0; +} + +int32_t AudioTrackJni::SetSpeakerVolume(uint32_t volume) { + if (!_speakerIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Speaker not initialized"); + return -1; + } + if (!globalContext) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Context is not set"); + return -1; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the
method ID + jmethodID setPlayoutVolumeID = env->GetMethodID(_javaScClass, + "SetPlayoutVolume", "(I)I"); + + // call java sc object method + jint res = env->CallIntMethod(_javaScObj, setPlayoutVolumeID, + static_cast<int> (volume)); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "SetPlayoutVolume failed (%d)", res); + return -1; + } + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + return 0; +} + +int32_t AudioTrackJni::SpeakerVolume(uint32_t& volume) const { // NOLINT + if (!_speakerIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Speaker not initialized"); + return -1; + } + if (!globalContext) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Context is not set"); + return -1; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID getPlayoutVolumeID = env->GetMethodID(_javaScClass, + "GetPlayoutVolume", "()I"); + + // call java sc object method + jint level = env->CallIntMethod(_javaScObj, getPlayoutVolumeID); + if (level < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "GetPlayoutVolume failed (%d)", level); + return -1; + } + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not detach thread from JVM"); + } + } + + volume = static_cast<uint32_t> (level); + + return 0; +} + + +int32_t AudioTrackJni::MaxSpeakerVolume(uint32_t& maxVolume) const { // NOLINT + if (!_speakerIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Speaker not initialized"); + return -1; + } + + maxVolume = _maxSpeakerVolume; + + return 0; +} + +int32_t AudioTrackJni::MinSpeakerVolume(uint32_t& minVolume) const { // NOLINT + if (!_speakerIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Speaker not initialized"); + return -1; + } + minVolume = 0; + return 0; +} + +int32_t AudioTrackJni::SpeakerVolumeStepSize( + uint16_t& stepSize) const { // NOLINT + if (!_speakerIsInitialized) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Speaker not initialized"); + return -1; + } + + stepSize = 1; + + return 0; +} + +int32_t AudioTrackJni::SpeakerMuteIsAvailable(bool& available) { // NOLINT + available = false; // Speaker mute not supported on Android + return 0; +} + +int32_t AudioTrackJni::SetSpeakerMute(bool enable) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioTrackJni::SpeakerMute(bool& /*enabled*/) const { + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +int32_t AudioTrackJni::StereoPlayoutIsAvailable(bool& available) { // NOLINT + available = false; // Stereo playout not supported on Android + return 0; +} + +int32_t AudioTrackJni::SetStereoPlayout(bool enable) { + if (enable) + { + WEBRTC_TRACE(kTraceError,
kTraceAudioDevice, _id, + " Enabling not available"); + return -1; + } + + return 0; +} + +int32_t AudioTrackJni::StereoPlayout(bool& enabled) const { // NOLINT + enabled = false; + return 0; +} + +int32_t AudioTrackJni::SetPlayoutBuffer( + const AudioDeviceModule::BufferType type, + uint16_t sizeMS) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + + +int32_t AudioTrackJni::PlayoutBuffer( + AudioDeviceModule::BufferType& type, // NOLINT + uint16_t& sizeMS) const { // NOLINT + type = AudioDeviceModule::kAdaptiveBufferSize; + sizeMS = _delayPlayout; // Set to current playout delay + + return 0; +} + +int32_t AudioTrackJni::PlayoutDelay(uint16_t& delayMS) const { // NOLINT + delayMS = _delayPlayout; + return 0; +} + +void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { + CriticalSectionScoped lock(&_critSect); + _ptrAudioBuffer = audioBuffer; + // inform the AudioBuffer about default settings for this implementation + _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC); + _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS); +} + +int32_t AudioTrackJni::SetPlayoutSampleRate(const uint32_t samplesPerSec) { + if (samplesPerSec > 48000 || samplesPerSec < 8000) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Invalid sample rate"); + return -1; + } + + // set the playout sample rate to use + _samplingFreqOut = samplesPerSec; + + // Update the AudioDeviceBuffer + _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec); + + return 0; +} + +bool AudioTrackJni::PlayoutWarning() const { + return (_playWarning > 0); +} + +bool AudioTrackJni::PlayoutError() const { + return (_playError > 0); +} + +void AudioTrackJni::ClearPlayoutWarning() { + _playWarning = 0; +} + +void AudioTrackJni::ClearPlayoutError() { + _playError = 0; +} + +int32_t AudioTrackJni::SetLoudspeakerStatus(bool enable) { + if (!globalContext) + { + WEBRTC_TRACE(kTraceError, kTraceUtility, -1, + " Context is not set"); + return -1; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceUtility, -1, + " Could not attach thread to JVM (%d, %p)", res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID setPlayoutSpeakerID = env->GetMethodID(_javaScClass, + "SetPlayoutSpeaker", + "(Z)I"); + + // call java sc object method + jint res = env->CallIntMethod(_javaScObj, setPlayoutSpeakerID, enable); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceUtility, -1, + " SetPlayoutSpeaker failed (%d)", res); + return -1; + } + + _loudSpeakerOn = enable; + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1, + " Could not detach thread from JVM"); + } + } + + return 0; +} + +int32_t AudioTrackJni::GetLoudspeakerStatus(bool& enabled) const { // NOLINT + enabled = _loudSpeakerOn; + return 0; +} + +int32_t AudioTrackJni::InitJavaResources() { + // todo: Check if we already have created the java object + _javaVM = globalJvm; + _javaScClass = globalScClass; + + // use the jvm that has been set + if (!_javaVM) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Not a 
valid Java VM pointer", __FUNCTION__); + return -1; + } + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "get method id"); + + // get the method ID for the void(void) constructor + jmethodID cid = env->GetMethodID(_javaScClass, "<init>", "()V"); + if (cid == NULL) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get constructor ID", __FUNCTION__); + return -1; /* exception thrown */ + } + + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "construct object", __FUNCTION__); + + // construct the object + jobject javaScObjLocal = env->NewObject(_javaScClass, cid); + if (!javaScObjLocal) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: could not create Java sc object", __FUNCTION__); + return -1; + } + + // Create a reference to the object (to tell JNI that we are referencing it + // after this function has returned). + _javaScObj = env->NewGlobalRef(javaScObjLocal); + if (!_javaScObj) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not create Java sc object reference", + __FUNCTION__); + return -1; + } + + // Delete local object ref, we only use the global ref. + env->DeleteLocalRef(javaScObjLocal); + + ////////////////////// + // AUDIO MANAGEMENT + + // This is not mandatory functionality + if (globalContext) { + jfieldID context_id = env->GetFieldID(globalScClass, + "_context", + "Landroid/content/Context;"); + if (!context_id) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get _context id", __FUNCTION__); + return -1; + } + + env->SetObjectField(_javaScObj, context_id, globalContext); + jobject javaContext = env->GetObjectField(_javaScObj, context_id); + if (!javaContext) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not set or get _context", __FUNCTION__); + return -1; + } + } + else { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: did not set Context - some functionality is not " + "supported", + __FUNCTION__); + } + + ///////////// + // PLAYOUT + + // Get play buffer field ID. + jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer", + "Ljava/nio/ByteBuffer;"); + if (!fidPlayBuffer) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get play buffer fid", __FUNCTION__); + return -1; + } + + // Get play buffer object. + jobject javaPlayBufferLocal = + env->GetObjectField(_javaScObj, fidPlayBuffer); + if (!javaPlayBufferLocal) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get play buffer", __FUNCTION__); + return -1; + } + + // Create a global reference to the object (to tell JNI that we are + // referencing it after this function has returned) + // NOTE: we are referencing it only through the direct buffer (see below). + _javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal); + if (!_javaPlayBuffer) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get play buffer reference", __FUNCTION__); + return -1; + } + + // Delete local object ref, we only use the global ref.
+ env->DeleteLocalRef(javaPlayBufferLocal); + + // Get direct buffer. + _javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer); + if (!_javaDirectPlayBuffer) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get direct play buffer", __FUNCTION__); + return -1; + } + + // Get the play audio method ID. + _javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I"); + if (!_javaMidPlayAudio) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: could not get play audio mid", __FUNCTION__); + return -1; + } + + // Detach this thread if it was attached. + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + return 0; + +} + +int32_t AudioTrackJni::InitSampleRate() { + int samplingFreq = 44100; + jint res = 0; + + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) + { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _javaVM->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + // get the method ID + jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback", + "(I)I"); + + if (_samplingFreqOut > 0) + { + // read the configured sampling rate + samplingFreq = _samplingFreqOut; + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, + " Trying configured playback sampling rate %d", + samplingFreq); + } + else + { + // set the preferred sampling frequency + if (samplingFreq == 8000) + { + // try 16000 + samplingFreq = 16000; + } + // else use same as recording + } + + bool keepTrying = true; + while (keepTrying) + { + // call java sc object method + res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq); + if (res < 0) + { + switch (samplingFreq) + { + case 44100: + samplingFreq = 16000; + break; + case 16000: + samplingFreq = 8000; + break; + default: // error + WEBRTC_TRACE(kTraceError, + kTraceAudioDevice, _id, + "InitPlayback failed (%d)", res); + return -1; + } + } + else + { + keepTrying = false; + } + } + + // Store max playout volume + _maxSpeakerVolume = static_cast<uint32_t> (res); + if (_maxSpeakerVolume < 1) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Did not get valid max speaker volume value (%d)", + _maxSpeakerVolume); + } + + // set the playback sample rate to use + _samplingFreqOut = samplingFreq; + + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, + "Playback sample rate set to (%d)", _samplingFreqOut); + + // get the method ID + jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback", + "()I"); + + // Call java sc object method + res = env->CallIntMethod(_javaScObj, stopPlaybackID); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "StopPlayback failed (%d)", res); + } + + // Detach this thread if it was attached + if (isAttached) + { + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + return 0; + +} + +bool AudioTrackJni::PlayThreadFunc(void* pThis) +{ + return (static_cast<AudioTrackJni*> (pThis)->PlayThreadProcess()); +} + +bool AudioTrackJni::PlayThreadProcess()
+{ + if (!_playThreadIsInitialized) + { + // Do once when thread is started + + // Attach this thread to JVM and get the JNI env for this thread + jint res = _javaVM->AttachCurrentThread(&_jniEnvPlay, NULL); + if ((res < 0) || !_jniEnvPlay) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, + _id, + "Could not attach playout thread to JVM (%d, %p)", + res, _jniEnvPlay); + return false; // Close down thread + } + + _playThreadIsInitialized = true; + } + + if (!_playing) + { + switch (_timeEventPlay.Wait(1000)) + { + case kEventSignaled: + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "Playout thread event signal"); + _timeEventPlay.Reset(); + break; + case kEventError: + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, + _id, "Playout thread event error"); + return true; + case kEventTimeout: + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "Playout thread event timeout"); + return true; + } + } + + Lock(); + + if (_startPlay) + { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "_startPlay true, performing initial actions"); + _startPlay = false; + _playing = true; + _playWarning = 0; + _playError = 0; + _playStartStopEvent.Set(); + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "Sent signal"); + } + + if (_playing) + { + int8_t playBuffer[2 * 480]; // Max 10 ms @ 48 kHz / 16 bit + uint32_t samplesToPlay = _samplingFreqOut * 10; + + // ask for new PCM data to be played out using the AudioDeviceBuffer + // ensure that this callback is executed without taking the + // audio-thread lock + UnLock(); + uint32_t nSamples = + _ptrAudioBuffer->RequestPlayoutData(samplesToPlay); + Lock(); + + // Check again since play may have stopped during unlocked period + if (!_playing) + { + UnLock(); + return true; + } + + nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer); + if (nSamples != samplesToPlay) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " invalid number of output samples(%d)", nSamples); + _playWarning = 1; + } + + // Copy data to our direct buffer (held by java sc object) + // todo: Give _javaDirectPlayBuffer directly to VoE? + memcpy(_javaDirectPlayBuffer, playBuffer, nSamples * 2); + + UnLock(); + + // Call java sc object method to process data in direct buffer + // Will block until data has been put in OS playout buffer + // (see java sc class) + jint res = _jniEnvPlay->CallIntMethod(_javaScObj, _javaMidPlayAudio, + 2 * nSamples); + if (res < 0) + { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "PlayAudio failed (%d)", res); + _playWarning = 1; + } + else if (res > 0) + { + // we are not recording and have got a delay value from playback + _delayPlayout = (res * 1000) / _samplingFreqOut; + } + Lock(); + + } // _playing + + if (_shutdownPlayThread) + { + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "Detaching thread from Java VM"); + + // Detach thread from Java VM + if (_javaVM->DetachCurrentThread() < 0) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, + _id, "Could not detach playout thread from JVM"); + _shutdownPlayThread = false; + // If we say OK (i.e. 
set event) and close thread anyway, + // app will crash + } + else + { + _jniEnvPlay = NULL; + _shutdownPlayThread = false; + _playStartStopEvent.Set(); // Signal to Terminate() that we are done + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + "Sent signal"); + } + } + + UnLock(); + return true; +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.h new file mode 100644 index 000000000000..7be46030b02f --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.h @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_ +#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_ + +#include <jni.h> + +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/modules/audio_device/android/audio_common.h" +#include "webrtc/modules/audio_device/include/audio_device_defines.h" +#include "webrtc/modules/audio_device/audio_device_generic.h" + +namespace webrtc { + +class EventWrapper; +class ThreadWrapper; + +class AudioTrackJni : public PlayoutDelayProvider { + public: + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, void* env, + void* context); + + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* context); + + static void ClearAndroidAudioDeviceObjects(); + explicit AudioTrackJni(const int32_t id); + virtual ~AudioTrackJni(); + + // Main initialization and termination + int32_t Init(); + int32_t Terminate(); + bool Initialized() const { return _initialized; } + + // Device enumeration + int16_t PlayoutDevices() { return 1; } // There is one device only.
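Both InitSampleRate implementations in this patch (recording earlier, playout above) negotiate the rate the same way: try the configured rate or 44100 Hz, and on each rejection from the Java layer fall back one step before giving up. The ladder, condensed into a sketch (NextSampleRate is a hypothetical name, not part of the patch):

    // Rate to try after the Java layer rejects failed_rate; -1 means give up.
    static int NextSampleRate(int failed_rate) {
      switch (failed_rate) {
        case 44100: return 16000;  // full rate rejected, try wideband
        case 16000: return 8000;   // wideband rejected, try narrowband
        default:    return -1;     // 8000 failed too; no usable rate
      }
    }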
+ + int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]); + + // Device selection + int32_t SetPlayoutDevice(uint16_t index); + int32_t SetPlayoutDevice( + AudioDeviceModule::WindowsDeviceType device); + + // Audio transport initialization + int32_t PlayoutIsAvailable(bool& available); // NOLINT + int32_t InitPlayout(); + bool PlayoutIsInitialized() const { return _playIsInitialized; } + + // Audio transport control + int32_t StartPlayout(); + int32_t StopPlayout(); + bool Playing() const { return _playing; } + + // Audio mixer initialization + int32_t SpeakerIsAvailable(bool& available); // NOLINT + int32_t InitSpeaker(); + bool SpeakerIsInitialized() const { return _speakerIsInitialized; } + + // Speaker volume controls + int32_t SpeakerVolumeIsAvailable(bool& available); // NOLINT + int32_t SetSpeakerVolume(uint32_t volume); + int32_t SpeakerVolume(uint32_t& volume) const; // NOLINT + int32_t MaxSpeakerVolume(uint32_t& maxVolume) const; // NOLINT + int32_t MinSpeakerVolume(uint32_t& minVolume) const; // NOLINT + int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const; // NOLINT + + // Speaker mute control + int32_t SpeakerMuteIsAvailable(bool& available); // NOLINT + int32_t SetSpeakerMute(bool enable); + int32_t SpeakerMute(bool& enabled) const; // NOLINT + + + // Stereo support + int32_t StereoPlayoutIsAvailable(bool& available); // NOLINT + int32_t SetStereoPlayout(bool enable); + int32_t StereoPlayout(bool& enabled) const; // NOLINT + + // Delay information and control + int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type, + uint16_t sizeMS); + int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type, // NOLINT + uint16_t& sizeMS) const; + int32_t PlayoutDelay(uint16_t& delayMS) const; // NOLINT + + // Attach audio buffer + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + + int32_t SetPlayoutSampleRate(const uint32_t samplesPerSec); + + // Error and warning information + bool PlayoutWarning() const; + bool PlayoutError() const; + void ClearPlayoutWarning(); + void ClearPlayoutError(); + + // Speaker audio routing + int32_t SetLoudspeakerStatus(bool enable); + int32_t GetLoudspeakerStatus(bool& enable) const; // NOLINT + + static const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz + static const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout + + protected: + virtual int PlayoutDelayMs() { return 0; } + + private: + void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) { + _critSect.Enter(); + } + void UnLock() UNLOCK_FUNCTION(_critSect) { + _critSect.Leave(); + } + + int32_t InitJavaResources(); + int32_t InitSampleRate(); + + static bool PlayThreadFunc(void*); + bool PlayThreadProcess(); + + // TODO(leozwang): Android holds only one JVM, all these jni handling + // will be consolidated into a single place to make it consistent and + // reliable. Chromium has a good example at base/android.
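The buffer members declared below carry audio across the JNI boundary without per-period allocation: Java allocates one direct ByteBuffer, C++ caches its raw address once via GetDirectBufferAddress, and each 10 ms period is a memcpy plus a single Java call. A self-contained sketch of the playout hot path under those assumptions (all names illustrative, not the patch's own helpers):

    #include <jni.h>
    #include <stdint.h>
    #include <string.h>

    // Pushes one 10 ms block of 16-bit mono PCM through the shared direct
    // buffer: copy in, then let the Java side hand it to AudioTrack.
    static bool PlayOnePeriod(JNIEnv* env, jobject scObj, jmethodID midPlayAudio,
                              void* directPlayBuffer,
                              const int16_t* pcm, size_t nSamples) {
      memcpy(directPlayBuffer, pcm, 2 * nSamples);  // 2 bytes per sample
      jint res = env->CallIntMethod(scObj, midPlayAudio,
                                    static_cast<jint>(2 * nSamples));
      return res >= 0;  // >= 0: buffered-delay estimate; < 0: PlayAudio failed
    }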
+ static JavaVM* globalJvm; + static JNIEnv* globalJNIEnv; + static jobject globalContext; + static jclass globalScClass; + + JavaVM* _javaVM; // denotes a Java VM + JNIEnv* _jniEnvPlay; // The JNI env for playout thread + jclass _javaScClass; // AudioDeviceAndroid class + jobject _javaScObj; // AudioDeviceAndroid object + jobject _javaPlayBuffer; + void* _javaDirectPlayBuffer; // Direct buffer pointer to play buffer + jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid + + AudioDeviceBuffer* _ptrAudioBuffer; + CriticalSectionWrapper& _critSect; + int32_t _id; + bool _initialized; + + EventWrapper& _timeEventPlay; + EventWrapper& _playStartStopEvent; + ThreadWrapper* _ptrThreadPlay; + uint32_t _playThreadID; + bool _playThreadIsInitialized; + bool _shutdownPlayThread; + bool _playoutDeviceIsSpecified; + + bool _playing; + bool _playIsInitialized; + bool _speakerIsInitialized; + + bool _startPlay; + + uint16_t _playWarning; + uint16_t _playError; + + uint16_t _delayPlayout; + + uint16_t _samplingFreqOut; // Sampling frequency for Speaker + uint32_t _maxSpeakerVolume; // The maximum speaker volume value + bool _loudSpeakerOn; + +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java new file mode 100644 index 000000000000..acb25966fb01 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java @@ -0,0 +1,204 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.voiceengine; + +import java.nio.ByteBuffer; +import java.util.concurrent.locks.ReentrantLock; + +import android.content.Context; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioRecord; +import android.media.MediaRecorder.AudioSource; +import android.util.Log; + +import org.mozilla.gecko.mozglue.WebRTCJNITarget; + +@WebRTCJNITarget +class WebRtcAudioRecord { + private AudioRecord _audioRecord = null; + + private Context _context; + + private ByteBuffer _recBuffer; + private byte[] _tempBufRec; + + private final ReentrantLock _recLock = new ReentrantLock(); + + private boolean _doRecInit = true; + private boolean _isRecording = false; + + private int _bufferedRecSamples = 0; + + WebRtcAudioRecord() { + try { + _recBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48 + // kHz + } catch (Exception e) { + DoLog(e.getMessage()); + } + + _tempBufRec = new byte[2 * 480]; + } + + @SuppressWarnings("unused") + private int InitRecording(int audioSource, int sampleRate) { + if(android.os.Build.VERSION.SDK_INT>=11) { + audioSource = AudioSource.VOICE_COMMUNICATION; + } else { + audioSource = AudioSource.DEFAULT; + } + // get the minimum buffer size that can be used + int minRecBufSize = AudioRecord.getMinBufferSize( + sampleRate, + AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT); + + // DoLog("min rec buf size is " + minRecBufSize); + + // double size to be more safe + int recBufSize = minRecBufSize * 2; + // On average half of the samples have been recorded/buffered and the + // recording interval is 1/100s. + _bufferedRecSamples = sampleRate / 200; + // DoLog("rough rec delay set to " + _bufferedRecSamples); + + // release the object + if (_audioRecord != null) { + _audioRecord.release(); + _audioRecord = null; + } + + try { + _audioRecord = new AudioRecord( + audioSource, + sampleRate, + AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT, + recBufSize); + + } catch (Exception e) { + DoLog(e.getMessage()); + return -1; + } + + // check that the audioRecord is ready to be used + if (_audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { + // DoLog("rec not initialized " + sampleRate); + return -1; + } + + // DoLog("rec sample rate set to " + sampleRate); + + return _bufferedRecSamples; + } + + @SuppressWarnings("unused") + private int StartRecording() { + // start recording + try { + _audioRecord.startRecording(); + + } catch (IllegalStateException e) { + e.printStackTrace(); + return -1; + } + + _isRecording = true; + return 0; + } + + @SuppressWarnings("unused") + private int StopRecording() { + _recLock.lock(); + try { + // only stop if we are recording + if (_audioRecord.getRecordingState() == + AudioRecord.RECORDSTATE_RECORDING) { + // stop recording + try { + _audioRecord.stop(); + } catch (IllegalStateException e) { + e.printStackTrace(); + return -1; + } + } + + // release the object + _audioRecord.release(); + _audioRecord = null; + + } finally { + // Ensure we always unlock, both for success, exception or error + // return. 
+ _doRecInit = true; + _recLock.unlock(); + } + + _isRecording = false; + return 0; + } + + @SuppressWarnings("unused") + private int RecordAudio(int lengthInBytes) { + _recLock.lock(); + + try { + if (_audioRecord == null) { + return -2; // We have probably closed down while waiting for rec + // lock + } + + // Set priority, only do once + if (_doRecInit == true) { + try { + android.os.Process.setThreadPriority( + android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); + } catch (Exception e) { + DoLog("Set rec thread priority failed: " + e.getMessage()); + } + _doRecInit = false; + } + + int readBytes = 0; + _recBuffer.rewind(); // Reset the position to start of buffer + readBytes = _audioRecord.read(_tempBufRec, 0, lengthInBytes); + // DoLog("read " + readBytes + "from SC"); + _recBuffer.put(_tempBufRec); + + if (readBytes != lengthInBytes) { + // DoLog("Could not read all data from sc (read = " + readBytes + // + ", length = " + lengthInBytes + ")"); + return -1; + } + + } catch (Exception e) { + DoLogErr("RecordAudio try failed: " + e.getMessage()); + + } finally { + // Ensure we always unlock, both for success, exception or error + // return. + _recLock.unlock(); + } + + return _bufferedRecSamples; + } + + final String logTag = "WebRTC AR java"; + + private void DoLog(String msg) { + Log.d(logTag, msg); + } + + private void DoLogErr(String msg) { + Log.e(logTag, msg); + } +} diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java b/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java similarity index 59% rename from media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java rename to media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java index 550322527778..ec18b31ae0e8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -22,114 +22,38 @@ import android.util.Log; import org.mozilla.gecko.mozglue.WebRTCJNITarget; -class WebRTCAudioDevice { +@WebRTCJNITarget +class WebRtcAudioTrack { private AudioTrack _audioTrack = null; - private AudioRecord _audioRecord = null; - @WebRTCJNITarget private Context _context; private AudioManager _audioManager; private ByteBuffer _playBuffer; - private ByteBuffer _recBuffer; private byte[] _tempBufPlay; - private byte[] _tempBufRec; private final ReentrantLock _playLock = new ReentrantLock(); - private final ReentrantLock _recLock = new ReentrantLock(); private boolean _doPlayInit = true; private boolean _doRecInit = true; private boolean _isRecording = false; private boolean _isPlaying = false; - private int _bufferedRecSamples = 0; private int _bufferedPlaySamples = 0; private int _playPosition = 0; - @WebRTCJNITarget - WebRTCAudioDevice() { + WebRtcAudioTrack() { try { _playBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48 // kHz - _recBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48 - // kHz } catch (Exception e) { DoLog(e.getMessage()); } _tempBufPlay = new byte[2 * 480]; - _tempBufRec = new byte[2 * 480]; } @SuppressWarnings("unused") - @WebRTCJNITarget - private int InitRecording(int audioSource, int sampleRate) { - // get the minimum buffer size that can be used - int minRecBufSize = AudioRecord.getMinBufferSize( - sampleRate, - AudioFormat.CHANNEL_IN_MONO, - AudioFormat.ENCODING_PCM_16BIT); - - // DoLog("min rec buf size is " + minRecBufSize); - - // double size to be more safe - int recBufSize = minRecBufSize * 2; - _bufferedRecSamples = (5 * sampleRate) / 200; - // DoLog("rough rec delay set to " + _bufferedRecSamples); - - // release the object - if (_audioRecord != null) { - _audioRecord.release(); - _audioRecord = null; - } - - try { - _audioRecord = new AudioRecord( - audioSource, - sampleRate, - AudioFormat.CHANNEL_IN_MONO, - AudioFormat.ENCODING_PCM_16BIT, - recBufSize); - - } catch (Exception e) { - DoLog(e.getMessage()); - return -1; - } - - // check that the audioRecord is ready to be used - if (_audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { - // DoLog("rec not initialized " + sampleRate); - return -1; - } - - // DoLog("rec sample rate set to " + sampleRate); - - return _bufferedRecSamples; - } - - @SuppressWarnings("unused") - @WebRTCJNITarget - private int StartRecording() { - if (_isPlaying == false) { - SetAudioMode(true); - } - - // start recording - try { - _audioRecord.startRecording(); - - } catch (IllegalStateException e) { - e.printStackTrace(); - return -1; - } - - _isRecording = true; - return 0; - } - - @SuppressWarnings("unused") - @WebRTCJNITarget private int InitPlayback(int sampleRate) { // get the minimum buffer size that can be used int minPlayBufSize = AudioTrack.getMinBufferSize( @@ -187,12 +111,7 @@ class WebRTCAudioDevice { } @SuppressWarnings("unused") - @WebRTCJNITarget private int StartPlayback() { - if (_isRecording == false) { - SetAudioMode(true); - } - // start playout try { _audioTrack.play(); @@ -207,43 +126,6 @@ class WebRTCAudioDevice { } @SuppressWarnings("unused") - @WebRTCJNITarget - private int StopRecording() { - _recLock.lock(); - try { - // only stop if we are recording - if (_audioRecord.getRecordingState() == - AudioRecord.RECORDSTATE_RECORDING) { - // stop recording - try { - _audioRecord.stop(); - } catch (IllegalStateException e) { - 
e.printStackTrace(); - return -1; - } - } - - // release the object - _audioRecord.release(); - _audioRecord = null; - - } finally { - // Ensure we always unlock, both for success, exception or error - // return. - _doRecInit = true; - _recLock.unlock(); - } - - if (_isPlaying == false) { - SetAudioMode(false); - } - - _isRecording = false; - return 0; - } - - @SuppressWarnings("unused") - @WebRTCJNITarget private int StopPlayback() { _playLock.lock(); try { @@ -272,20 +154,13 @@ class WebRTCAudioDevice { _playLock.unlock(); } - if (_isRecording == false) { - SetAudioMode(false); - } - _isPlaying = false; return 0; } @SuppressWarnings("unused") - @WebRTCJNITarget private int PlayAudio(int lengthInBytes) { - int bufferedSamples = 0; - _playLock.lock(); try { if (_audioTrack == null) { @@ -322,10 +197,6 @@ class WebRTCAudioDevice { _bufferedPlaySamples -= (pos - _playPosition); _playPosition = pos; - if (!_isRecording) { - bufferedSamples = _bufferedPlaySamples; - } - if (written != lengthInBytes) { // DoLog("Could not write all data to sc (written = " + written // + ", length = " + lengthInBytes + ")"); @@ -338,57 +209,10 @@ class WebRTCAudioDevice { _playLock.unlock(); } - return bufferedSamples; + return _bufferedPlaySamples; } @SuppressWarnings("unused") - @WebRTCJNITarget - private int RecordAudio(int lengthInBytes) { - _recLock.lock(); - - try { - if (_audioRecord == null) { - return -2; // We have probably closed down while waiting for rec - // lock - } - - // Set priority, only do once - if (_doRecInit == true) { - try { - android.os.Process.setThreadPriority( - android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); - } catch (Exception e) { - DoLog("Set rec thread priority failed: " + e.getMessage()); - } - _doRecInit = false; - } - - int readBytes = 0; - _recBuffer.rewind(); // Reset the position to start of buffer - readBytes = _audioRecord.read(_tempBufRec, 0, lengthInBytes); - // DoLog("read " + readBytes + "from SC"); - _recBuffer.put(_tempBufRec); - - if (readBytes != lengthInBytes) { - // DoLog("Could not read all data from sc (read = " + readBytes - // + ", length = " + lengthInBytes + ")"); - return -1; - } - - } catch (Exception e) { - DoLogErr("RecordAudio try failed: " + e.getMessage()); - - } finally { - // Ensure we always unlock, both for success, exception or error - // return. - _recLock.unlock(); - } - - return (_bufferedPlaySamples); - } - - @SuppressWarnings("unused") - @WebRTCJNITarget private int SetPlayoutSpeaker(boolean loudspeakerOn) { // create audio manager if needed if (_audioManager == null && _context != null) { @@ -438,7 +262,6 @@ class WebRTCAudioDevice { } @SuppressWarnings("unused") - @WebRTCJNITarget private int SetPlayoutVolume(int level) { // create audio manager if needed @@ -459,7 +282,6 @@ class WebRTCAudioDevice { } @SuppressWarnings("unused") - @WebRTCJNITarget private int GetPlayoutVolume() { // create audio manager if needed @@ -478,36 +300,7 @@ class WebRTCAudioDevice { return level; } - private void SetAudioMode(boolean startCall) { - int apiLevel = android.os.Build.VERSION.SDK_INT; - - if (_audioManager == null && _context != null) { - _audioManager = (AudioManager) - _context.getSystemService(Context.AUDIO_SERVICE); - } - - if (_audioManager == null) { - DoLogErr("Could not set audio mode - no audio manager"); - return; - } - - // ***IMPORTANT*** When the API level for honeycomb (H) has been - // decided, - // the condition should be changed to include API level 8 to H-1. 
- if ((android.os.Build.BRAND.equals("Samsung") || - android.os.Build.BRAND.equals("samsung")) && - (8 == apiLevel)) { - // Set Samsung specific VoIP mode for 2.2 devices - // 4 is VoIP mode - int mode = (startCall ? 4 : AudioManager.MODE_NORMAL); - _audioManager.setMode(mode); - if (_audioManager.getMode() != mode) { - DoLogErr("Could not set audio mode for Samsung device"); - } - } - } - - final String logTag = "WebRTC AD java"; + final String logTag = "WebRTC AT java"; private void DoLog(String msg) { Log.d(logTag, msg); diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/low_latency_event_posix.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/low_latency_event_posix.cc index 4e0c88a4d63f..f25b030d04cf 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/low_latency_event_posix.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/low_latency_event_posix.cc @@ -20,6 +20,17 @@ eintr_wrapper_result; \ }) +#define IGNORE_EINTR(x) ({ \ + typeof(x) eintr_wrapper_result; \ + do { \ + eintr_wrapper_result = (x); \ + if (eintr_wrapper_result == -1 && errno == EINTR) { \ + eintr_wrapper_result = 0; \ + } \ + } while (0); \ + eintr_wrapper_result; \ + }) + namespace webrtc { const LowLatencyEvent::Handle LowLatencyEvent::kInvalidHandle = -1; @@ -61,7 +72,7 @@ bool LowLatencyEvent::Close(Handle* handle) { if (*handle == kInvalidHandle) { return false; } - int retval = HANDLE_EINTR(close(*handle)); + int retval = IGNORE_EINTR(close(*handle)); *handle = kInvalidHandle; return retval == 0; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.cc index be70e44de5b0..9a16f7071c17 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.cc @@ -12,6 +12,10 @@ #include +#include "webrtc/modules/audio_device/android/audio_common.h" + +using webrtc::kNumChannels; + namespace webrtc_opensl { SLDataFormat_PCM CreatePcmConfiguration(int sample_rate) { diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.h index e1521718444a..daa51a28682c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_common.h @@ -15,21 +15,6 @@ namespace webrtc_opensl { -enum { - kDefaultSampleRate = 44100, - kNumChannels = 1 -}; - - -class PlayoutDelayProvider { - public: - virtual int PlayoutDelayMs() = 0; - - protected: - PlayoutDelayProvider() {} - virtual ~PlayoutDelayProvider() {} -}; - SLDataFormat_PCM CreatePcmConfiguration(int sample_rate); } // namespace webrtc_opensl diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc index b04ddaa51448..1fc6ca630c07 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc @@ -13,15 +13,14 @@ #include #include +#include "webrtc/modules/audio_device/android/audio_common.h" +#include "webrtc/modules/audio_device/android/opensles_common.h" #include "webrtc/modules/audio_device/android/single_rw_fifo.h" #include "webrtc/modules/audio_device/audio_device_buffer.h" #include 
"webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/thread_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" -using webrtc_opensl::kDefaultSampleRate; -using webrtc_opensl::kNumChannels; - #define VOID_RETURN #define OPENSL_RETURN_ON_FAILURE(op, ret_val) \ do { \ @@ -46,8 +45,7 @@ enum { namespace webrtc { OpenSlesInput::OpenSlesInput( - const int32_t id, - webrtc_opensl::PlayoutDelayProvider* delay_provider) + const int32_t id, PlayoutDelayProvider* delay_provider) : id_(id), delay_provider_(delay_provider), initialized_(false), @@ -73,6 +71,21 @@ OpenSlesInput::OpenSlesInput( OpenSlesInput::~OpenSlesInput() { } +int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM, + void* env, + void* context) { +#if !defined(WEBRTC_GONK) + AudioManagerJni::SetAndroidAudioDeviceObjects(javaVM, env, context); +#endif + return 0; +} + +void OpenSlesInput::ClearAndroidAudioDeviceObjects() { +#if !defined(WEBRTC_GONK) + AudioManagerJni::ClearAndroidAudioDeviceObjects(); +#endif +} + int32_t OpenSlesInput::Init() { assert(!initialized_); @@ -104,7 +117,7 @@ int32_t OpenSlesInput::Init() { // Set up OpenSL engine. OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0, - NULL, NULL), + NULL, NULL), -1); OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_, SL_BOOLEAN_FALSE), @@ -193,6 +206,7 @@ int32_t OpenSlesInput::StartRecording() { int32_t OpenSlesInput::StopRecording() { StopCbThreads(); DestroyAudioRecorder(); + recording_ = false; return 0; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h index 2b44ebb83a65..543d80c729f3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h @@ -19,7 +19,6 @@ #include "webrtc/modules/audio_device/android/audio_manager_jni.h" #endif #include "webrtc/modules/audio_device/android/low_latency_event.h" -#include "webrtc/modules/audio_device/android/opensles_common.h" #include "webrtc/modules/audio_device/include/audio_device.h" #include "webrtc/modules/audio_device/include/audio_device_defines.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" @@ -38,10 +37,14 @@ class ThreadWrapper; // to non-const methods require exclusive access to the object. 
class OpenSlesInput { public: - OpenSlesInput(const int32_t id, - webrtc_opensl::PlayoutDelayProvider* delay_provider); + OpenSlesInput(const int32_t id, PlayoutDelayProvider* delay_provider); ~OpenSlesInput(); + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* env, + void* context); + static void ClearAndroidAudioDeviceObjects(); + // Main initializaton and termination int32_t Init(); int32_t Terminate(); @@ -58,6 +61,9 @@ class OpenSlesInput { int32_t SetRecordingDevice( AudioDeviceModule::WindowsDeviceType device) { return -1; } + // No-op + int32_t SetRecordingSampleRate(uint32_t sample_rate_hz) { return 0; } + // Audio transport initialization int32_t RecordingIsAvailable(bool& available); // NOLINT int32_t InitRecording(); @@ -173,7 +179,7 @@ class OpenSlesInput { #endif int id_; - webrtc_opensl::PlayoutDelayProvider* delay_provider_; + PlayoutDelayProvider* delay_provider_; bool initialized_; bool mic_initialized_; bool rec_initialized_; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc index 84bff81060bb..10fca7c0b8d0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc @@ -8,11 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ +#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT + #include "webrtc/modules/audio_device/android/opensles_output.h" #include #include +#include "webrtc/modules/audio_device/android/opensles_common.h" #include "webrtc/modules/audio_device/android/fine_audio_buffer.h" #include "webrtc/modules/audio_device/android/single_rw_fifo.h" #include "webrtc/modules/audio_device/audio_device_buffer.h" @@ -20,9 +23,6 @@ #include "webrtc/system_wrappers/interface/thread_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" -using webrtc_opensl::kDefaultSampleRate; -using webrtc_opensl::kNumChannels; - #define VOID_RETURN #define OPENSL_RETURN_ON_FAILURE(op, ret_val) \ do { \ @@ -73,6 +73,17 @@ OpenSlesOutput::OpenSlesOutput(const int32_t id) OpenSlesOutput::~OpenSlesOutput() { } +int32_t OpenSlesOutput::SetAndroidAudioDeviceObjects(void* javaVM, + void* env, + void* context) { + AudioManagerJni::SetAndroidAudioDeviceObjects(javaVM, env, context); + return 0; +} + +void OpenSlesOutput::ClearAndroidAudioDeviceObjects() { + AudioManagerJni::ClearAndroidAudioDeviceObjects(); +} + int32_t OpenSlesOutput::Init() { assert(!initialized_); @@ -106,7 +117,7 @@ int32_t OpenSlesOutput::Init() { // Set up OpenSl engine. 
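For readers unfamiliar with the OpenSL ES C API used in the hunk above: every call returns an SLresult that OPENSL_RETURN_ON_FAILURE checks, and objects must be Realized before use. A standalone sketch of the same engine bring-up, assuming the Android NDK's <SLES/OpenSLES.h>; the function name CreateEngine and the THREADSAFE option are illustrative, and error handling is collapsed to early returns rather than the macro:

    #include <SLES/OpenSLES.h>

    // Create and synchronously realize an OpenSL ES engine object,
    // mirroring the slCreateEngine/Realize sequence in this file.
    SLObjectItf CreateEngine() {
      SLObjectItf engine = nullptr;
      const SLEngineOption option[] = {
          {SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE)}};
      if (slCreateEngine(&engine, 1, option, 0, nullptr, nullptr) !=
          SL_RESULT_SUCCESS) {
        return nullptr;  // engine creation failed
      }
      // SL_BOOLEAN_FALSE selects synchronous (blocking) realization.
      if ((*engine)->Realize(engine, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) {
        (*engine)->Destroy(engine);
        return nullptr;
      }
      return engine;
    }
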
OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0, - NULL, NULL), + NULL, NULL), -1); OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_, SL_BOOLEAN_FALSE), @@ -170,7 +181,6 @@ int32_t OpenSlesOutput::PlayoutIsAvailable(bool& available) { // NOLINT int32_t OpenSlesOutput::InitPlayout() { assert(initialized_); - assert(!play_initialized_); play_initialized_ = true; return 0; } @@ -207,6 +217,7 @@ int32_t OpenSlesOutput::StartPlayout() { int32_t OpenSlesOutput::StopPlayout() { StopCbThreads(); DestroyAudioPlayer(); + playing_ = false; return 0; } @@ -594,3 +605,5 @@ bool OpenSlesOutput::CbThreadImpl() { } } // namespace webrtc + +#endif diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h index 0fe9a454b353..a73567f35232 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h @@ -19,7 +19,7 @@ #include "webrtc/modules/audio_device/android/audio_manager_jni.h" #endif #include "webrtc/modules/audio_device/android/low_latency_event.h" -#include "webrtc/modules/audio_device/android/opensles_common.h" +#include "webrtc/modules/audio_device/android/audio_common.h" #include "webrtc/modules/audio_device/include/audio_device_defines.h" #include "webrtc/modules/audio_device/include/audio_device.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" @@ -32,14 +32,22 @@ class FineAudioBuffer; class SingleRwFifo; class ThreadWrapper; +#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT +// allow us to replace it with a dummy + // OpenSL implementation that facilitate playing PCM data to an android device. // This class is Thread-compatible. I.e. Given an instance of this class, calls // to non-const methods require exclusive access to the object. -class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider { +class OpenSlesOutput : public PlayoutDelayProvider { public: explicit OpenSlesOutput(const int32_t id); virtual ~OpenSlesOutput(); + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* env, + void* context); + static void ClearAndroidAudioDeviceObjects(); + // Main initializaton and termination int32_t Init(); int32_t Terminate(); @@ -57,6 +65,9 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider { int32_t SetPlayoutDevice( AudioDeviceModule::WindowsDeviceType device) { return 0; } + // No-op + int32_t SetPlayoutSampleRate(uint32_t sample_rate_hz) { return 0; } + // Audio transport initialization int32_t PlayoutIsAvailable(bool& available); // NOLINT int32_t InitPlayout(); @@ -135,7 +146,7 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider { // there will be jitter in audio pipe line due to the acquisition of locks. // Note: The buffers in the OpenSL queue do not count towards the 10ms of // frames needed since OpenSL needs to have them ready for playout. 
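The constant changed just below trades latency for underrun safety: each queued unit is 10 ms of audio, so raising kNum10MsToBuffer from 4 to 6 keeps roughly 60 ms buffered ahead of playout instead of 40 ms. A small worked sketch of the arithmetic; the sample rate is illustrative and device-dependent, the mono channel count matches this module:

    #include <cstdio>

    int main() {
      const int kSampleRateHz = 44100;   // illustrative; device-dependent
      const int kNumChannels = 1;        // mono, as in this module
      const int kNum10MsToBuffer = 6;    // value after this change (was 4)

      // Samples in one 10 ms block, and bytes at 16-bit PCM.
      const int samplesPer10Ms = kSampleRateHz / 100 * kNumChannels;
      const int bytesPer10Ms = samplesPer10Ms * 2;

      std::printf("buffered: %d ms, %d samples, %d bytes\n",
                  kNum10MsToBuffer * 10,
                  kNum10MsToBuffer * samplesPer10Ms,
                  kNum10MsToBuffer * bytesPer10Ms);
      return 0;  // prints: buffered: 60 ms, 2646 samples, 5292 bytes
    }
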
- kNum10MsToBuffer = 4, + kNum10MsToBuffer = 6, }; bool InitSampleRate(); @@ -251,6 +262,200 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider { SLInterfaceID SL_IID_VOLUME_; }; +#else + +// Dummy OpenSlesOutput +class OpenSlesOutput : public PlayoutDelayProvider { + public: + explicit OpenSlesOutput(const int32_t id) : + initialized_(false), speaker_initialized_(false), + play_initialized_(false), playing_(false) + {} + virtual ~OpenSlesOutput() {} + + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* env, + void* context) { return 0; } + static void ClearAndroidAudioDeviceObjects() {} + + // Main initializaton and termination + int32_t Init() { initialized_ = true; return 0; } + int32_t Terminate() { initialized_ = false; return 0; } + bool Initialized() const { return initialized_; } + + // Device enumeration + int16_t PlayoutDevices() { return 1; } + + int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) + { + assert(index == 0); + // Empty strings. + name[0] = '\0'; + guid[0] = '\0'; + return 0; + } + + // Device selection + int32_t SetPlayoutDevice(uint16_t index) + { + assert(index == 0); + return 0; + } + int32_t SetPlayoutDevice( + AudioDeviceModule::WindowsDeviceType device) { return 0; } + + // No-op + int32_t SetPlayoutSampleRate(uint32_t sample_rate_hz) { return 0; } + + // Audio transport initialization + int32_t PlayoutIsAvailable(bool& available) // NOLINT + { + available = true; + return 0; + } + int32_t InitPlayout() + { + assert(initialized_); + play_initialized_ = true; + return 0; + } + bool PlayoutIsInitialized() const { return play_initialized_; } + + // Audio transport control + int32_t StartPlayout() + { + assert(play_initialized_); + assert(!playing_); + playing_ = true; + return 0; + } + + int32_t StopPlayout() + { + playing_ = false; + return 0; + } + + bool Playing() const { return playing_; } + + // Audio mixer initialization + int32_t SpeakerIsAvailable(bool& available) // NOLINT + { + available = true; + return 0; + } + int32_t InitSpeaker() + { + assert(!playing_); + speaker_initialized_ = true; + return 0; + } + bool SpeakerIsInitialized() const { return speaker_initialized_; } + + // Speaker volume controls + int32_t SpeakerVolumeIsAvailable(bool& available) // NOLINT + { + available = true; + return 0; + } + int32_t SetSpeakerVolume(uint32_t volume) + { + assert(speaker_initialized_); + assert(initialized_); + return 0; + } + int32_t SpeakerVolume(uint32_t& volume) const { return 0; } // NOLINT + int32_t MaxSpeakerVolume(uint32_t& maxVolume) const // NOLINT + { + assert(speaker_initialized_); + assert(initialized_); + maxVolume = 0; + return 0; + } + int32_t MinSpeakerVolume(uint32_t& minVolume) const // NOLINT + { + assert(speaker_initialized_); + assert(initialized_); + minVolume = 0; + return 0; + } + int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const // NOLINT + { + assert(speaker_initialized_); + assert(initialized_); + stepSize = 0; + return 0; + } + + // Speaker mute control + int32_t SpeakerMuteIsAvailable(bool& available) // NOLINT + { + available = true; + return 0; + } + int32_t SetSpeakerMute(bool enable) { return -1; } + int32_t SpeakerMute(bool& enabled) const { return -1; } // NOLINT + + + // Stereo support + int32_t StereoPlayoutIsAvailable(bool& available) // NOLINT + { + available = true; + return 0; + } + int32_t SetStereoPlayout(bool enable) + { + return 0; + } + int32_t StereoPlayout(bool& enabled) const // NOLINT + { + enabled = 
kNumChannels == 2; + return 0; + } + + // Delay information and control + int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type, + uint16_t sizeMS) { return -1; } + int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type, // NOLINT + uint16_t& sizeMS) const + { + type = AudioDeviceModule::kAdaptiveBufferSize; + sizeMS = 40; + return 0; + } + int32_t PlayoutDelay(uint16_t& delayMS) const // NOLINT + { + delayMS = 0; + return 0; + } + + + // Error and warning information + bool PlayoutWarning() const { return false; } + bool PlayoutError() const { return false; } + void ClearPlayoutWarning() {} + void ClearPlayoutError() {} + + // Attach audio buffer + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {} + + // Speaker audio routing + int32_t SetLoudspeakerStatus(bool enable) { return 0; } + int32_t GetLoudspeakerStatus(bool& enable) const { enable = true; return 0; } // NOLINT + + protected: + virtual int PlayoutDelayMs() { return 40; } + + private: + bool initialized_; + bool speaker_initialized_; + bool play_initialized_; + bool playing_; +}; +#endif + } // namespace webrtc #endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_OUTPUT_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/single_rw_fifo.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/single_rw_fifo.cc index 6e35da28b827..e63cb7f4d053 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/single_rw_fifo.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/single_rw_fifo.cc @@ -8,11 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/modules/audio_device/android/single_rw_fifo.h" #if defined(_MSC_VER) #include #endif +#include "webrtc/modules/audio_device/android/single_rw_fifo.h" + static int UpdatePos(int pos, int capacity) { return (pos + 1) % capacity; } @@ -21,14 +22,44 @@ namespace webrtc { namespace subtle { +// Start with compiler support, then processor-specific hacks +#if defined(__GNUC__) || defined(__clang__) +// Available on GCC and clang - others? inline void MemoryBarrier() { -#if defined(_MSC_VER) - ::MemoryBarrier(); -#else __sync_synchronize(); -#endif } +#elif defined(_MSC_VER) +inline void MemoryBarrier() { + ::MemoryBarrier(); +} + +#elif defined(__ARMEL__) +// From http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_arm_gcc.h +// Note that it is only the MemoryBarrier function that makes this class arm +// specific. Borrowing other MemoryBarrier implementations, this class could +// be extended to more platforms. +inline void MemoryBarrier() { + // Note: This is a function call, which is also an implicit compiler + // barrier. + typedef void (*KernelMemoryBarrierFunc)(); + ((KernelMemoryBarrierFunc)0xffff0fa0)(); +} + +#elif defined(__x86_64__) || defined (__i386__) +// From http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_x86_gcc.h +// mfence exists on x64 and x86 platforms containing SSE2. +// x86 platforms that don't have SSE2 will crash with SIGILL. +// If this code needs to run on such platforms in the future, +// add runtime CPU detection here. +inline void MemoryBarrier() { + __asm__ __volatile__("mfence" : : : "memory"); +} + +#else +#error Add an implementation of MemoryBarrier() for this platform! 
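The platform ladder above ends in a hard #error so an unsupported target fails at compile time rather than silently lacking ordering guarantees. Since C++11 the whole ladder can usually be collapsed into a standard fence; this patch predates that being a safe assumption for all of Gecko's toolchains. A sketch of the portable equivalent, not what the patch itself uses; the name FullMemoryBarrier is mine:

    #include <atomic>

    // Full two-way fence: no loads or stores may be reordered across it.
    // Equivalent in intent to __sync_synchronize(), ::MemoryBarrier(),
    // the ARM kernel helper, or the x86 mfence chosen above.
    inline void FullMemoryBarrier() {
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }
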
+#endif + } // namespace subtle SingleRwFifo::SingleRwFifo(int capacity) diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi index cebb982c8b03..322e5e90865b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi @@ -12,6 +12,7 @@ 'target_name': 'audio_device', 'type': 'static_library', 'dependencies': [ + 'webrtc_utility', '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], @@ -88,7 +89,7 @@ }], # moz_widget_toolkit_gonk==1 ['enable_android_opensl==1', { 'include_dirs': [ - 'opensl', + 'opensl', ], }], # enable_android_opensl ['include_internal_audio_device==0', { @@ -126,16 +127,15 @@ # used externally for getUserMedia 'opensl/single_rw_fifo.cc', 'opensl/single_rw_fifo.h', + 'android/audio_device_template.h', + 'android/audio_manager_jni.cc', + 'android/audio_manager_jni.h', + 'android/audio_record_jni.cc', + 'android/audio_record_jni.h', + 'android/audio_track_jni.cc', + 'android/audio_track_jni.h', ], 'conditions': [ - ['OS=="android"', { - 'sources': [ - 'opensl/audio_manager_jni.cc', - 'opensl/audio_manager_jni.h', - 'android/audio_device_jni_android.cc', - 'android/audio_device_jni_android.h', - ], - }], ['OS=="android" or moz_widget_toolkit_gonk==1', { 'link_settings': { 'libraries': [ @@ -146,8 +146,6 @@ 'conditions': [ ['enable_android_opensl==1', { 'sources': [ - 'opensl/audio_device_opensles.cc', - 'opensl/audio_device_opensles.h', 'opensl/fine_audio_buffer.cc', 'opensl/fine_audio_buffer.h', 'opensl/low_latency_event_posix.cc', @@ -164,8 +162,6 @@ 'sources': [ 'shared/audio_device_utility_shared.cc', 'shared/audio_device_utility_shared.h', - 'android/audio_device_jni_android.cc', - 'android/audio_device_jni_android.h', ], }], ['enable_android_opensl_output==1', { @@ -174,8 +170,8 @@ ], 'defines': [ 'WEBRTC_ANDROID_OPENSLES_OUTPUT', - ]}, - ], + ], + }], ], }], ['OS=="linux"', { @@ -285,10 +281,10 @@ 'target_name': 'audio_device_tests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'audio_device_tests', ], 'includes': [ + '../../build/isolate.gypi', 'audio_device_tests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_buffer.h b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_buffer.h index fd06e7c3c38b..84df5594b131 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_buffer.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_buffer.h @@ -36,13 +36,13 @@ public: int32_t InitPlayout(); int32_t InitRecording(); - int32_t SetRecordingSampleRate(uint32_t fsHz); - int32_t SetPlayoutSampleRate(uint32_t fsHz); + virtual int32_t SetRecordingSampleRate(uint32_t fsHz); + virtual int32_t SetPlayoutSampleRate(uint32_t fsHz); int32_t RecordingSampleRate() const; int32_t PlayoutSampleRate() const; - int32_t SetRecordingChannels(uint8_t channels); - int32_t SetPlayoutChannels(uint8_t channels); + virtual int32_t SetRecordingChannels(uint8_t channels); + virtual int32_t SetPlayoutChannels(uint8_t channels); uint8_t RecordingChannels() const; uint8_t PlayoutChannels() const; int32_t SetRecordingChannel( @@ -50,12 +50,13 @@ public: int32_t RecordingChannel( AudioDeviceModule::ChannelType& channel) const; - int32_t SetRecordedBuffer(const void* audioBuffer, uint32_t nSamples); + virtual 
int32_t SetRecordedBuffer(const void* audioBuffer, + uint32_t nSamples); int32_t SetCurrentMicLevel(uint32_t level); - void SetVQEData(int playDelayMS, - int recDelayMS, - int clockDrift); - int32_t DeliverRecordedData(); + virtual void SetVQEData(int playDelayMS, + int recDelayMS, + int clockDrift); + virtual int32_t DeliverRecordedData(); uint32_t NewMicLevel() const; virtual int32_t RequestPlayoutData(uint32_t nSamples); diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc index 6ce9641a4592..87fa0e9b9660 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc @@ -29,21 +29,20 @@ #include #include #include "audio_device_utility_android.h" - #include "audio_device_opensles_android.h" + #include "webrtc/modules/audio_device/android/audio_device_template.h" #if !defined(WEBRTC_GONK) - #include "audio_device_jni_android.h" -#endif -#elif defined(WEBRTC_ANDROID) // GONK only supports opensles; android can use that or jni - #include - #include "audio_device_utility_android.h" - #include "audio_device_jni_android.h" + #include "webrtc/modules/audio_device/android/audio_record_jni.h" + #include "webrtc/modules/audio_device/android/audio_track_jni.h" +#endif + #include "webrtc/modules/audio_device/android/opensles_input.h" + #include "webrtc/modules/audio_device/android/opensles_output.h" #elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) #include "audio_device_utility_linux.h" -#if defined(LINUX_ALSA) + #if defined(LINUX_ALSA) #include "audio_device_alsa_linux.h" -#endif -#if defined(LINUX_PULSE) + #endif + #if defined(LINUX_PULSE) #include "audio_device_pulse_linux.h" #endif #elif defined(WEBRTC_IOS) @@ -266,38 +265,46 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() // Create the *Android OpenSLES* implementation of the Audio Device // -#if defined(WEBRTC_ANDROID_OPENSLES) - // Check if the OpenSLES library is available before going further. - void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY); - if (opensles_lib) { +#if defined(WEBRTC_ANDROID) || defined (WEBRTC_GONK) + if (audioLayer == kPlatformDefaultAudio) + { + // AudioRecordJni provides hardware AEC and OpenSlesOutput low latency. +#if defined (WEBRTC_ANDROID_OPENSLES) + // Android and Gonk + // Check if the OpenSLES library is available before going further. + void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY); + if (opensles_lib) { // That worked, close for now and proceed normally. dlclose(opensles_lib); if (audioLayer == kPlatformDefaultAudio) { - // Create *Android OpenSLES Audio* implementation - ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id()); - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - "Android OpenSLES Audio APIs will be utilized"); + // Create *Android OpenSLES Audio* implementation + ptrAudioDevice = new AudioDeviceTemplate(Id()); + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "Android OpenSLES Audio APIs will be utilized"); } - } - + } +#endif #if !defined(WEBRTC_GONK) - // Fall back to this case if on Android 2.2/OpenSLES not available. - if (ptrAudioDevice == NULL) { + // Fall back to this case if on Android 2.2/OpenSLES not available. 
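The backend selection earlier in this hunk rests on a runtime probe: dlopen() the OpenSL ES library, and if it loads, prefer that backend; otherwise fall through to the JNI device, as the comment above describes. A self-contained sketch of that probe pattern; the function name HasOpenSLES is mine, the library name is the one used in the code above:

    #include <dlfcn.h>

    // Returns true if the OpenSL ES runtime is present on this device.
    // Probe only: the handle is closed again here, and reopened by the
    // code that actually binds symbols such as slCreateEngine.
    bool HasOpenSLES() {
      void* handle = dlopen("libOpenSLES.so", RTLD_LAZY);
      if (handle == nullptr) {
        return false;  // e.g. Android 2.2 and older
      }
      dlclose(handle);
      return true;
    }
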
+ if (ptrAudioDevice == NULL) { // Create the *Android Java* implementation of the Audio Device if (audioLayer == kPlatformDefaultAudio) { - // Create *Android JNI Audio* implementation - ptrAudioDevice = new AudioDeviceAndroidJni(Id()); - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized"); + // Create *Android JNI Audio* implementation + ptrAudioDevice = new AudioDeviceTemplate(Id()); + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized"); } - } + } #endif + } + if (ptrAudioDevice != NULL) { - // Create the Android implementation of the Device Utility. - ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id()); + // Create the Android implementation of the Device Utility. + ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id()); } + // END #if defined(WEBRTC_ANDROID_OPENSLES) // Create the *Linux* implementation of the Audio Device // @@ -307,14 +314,16 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() #if defined(LINUX_PULSE) WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Linux PulseAudio APIs..."); - if (AudioDeviceLinuxPulse::PulseAudioIsSupported()) + // create *Linux PulseAudio* implementation + AudioDeviceLinuxPulse* pulseDevice = new AudioDeviceLinuxPulse(Id()); + if (pulseDevice->Init() != -1) { - // create *Linux PulseAudio* implementation - ptrAudioDevice = new AudioDeviceLinuxPulse(Id()); + ptrAudioDevice = pulseDevice; WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Linux PulseAudio APIs will be utilized"); } else { + delete pulseDevice; #endif #if defined(LINUX_ALSA) // create *Linux ALSA Audio* implementation diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_tests.isolate b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_tests.isolate index 38b397f52031..69e877c14f1c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_tests.isolate +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_tests.isolate @@ -21,16 +21,15 @@ 'variables': { 'command': [ '../../../testing/test_env.py', - '../../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/audio_device_tests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../../testing/test_env.py', - '../../../tools/swarm_client/run_isolated.py', - '../../../tools/swarm_client/googletest/run_test_cases.py', - '../../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/audio_device_tests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/include/audio_device_defines.h b/media/webrtc/trunk/webrtc/modules/audio_device/include/audio_device_defines.h index c37c4b13955b..9f3e24b10aab 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/include/audio_device_defines.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/include/audio_device_defines.h @@ -85,8 +85,8 @@ public: // will be ignored. // The return value is the new microphone volume, in the range of |0, 255]. // When the volume does not need to be updated, it returns 0. - // TODO(xians): Make the interface pure virtual after libjingle has its - // implementation. + // TODO(xians): Remove this interface after Chrome and Libjingle switches + // to OnData(). 
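The TODO above captures a staged interface migration: when an abstract callback interface has implementers outside this tree, a new method is introduced as a plain virtual with an empty default body rather than as pure virtual, so existing subclasses keep compiling while callers move over. A reduced sketch of that shape; all names here are hypothetical, not the real AudioTransport API:

    // Callback interface with one established and one newly added method.
    class AudioCallback {
     public:
      virtual ~AudioCallback() {}

      // Long-standing method: pure virtual, every implementer has it.
      virtual int OnOldData(const short* samples, int num_samples) = 0;

      // Newly added method: default no-op body, so implementers that have
      // not migrated yet still compile. It can be made pure virtual once
      // all known subclasses override it, which is the plan the TODO
      // above describes.
      virtual void OnNewData(const void* data, int bits_per_sample,
                             int sample_rate, int num_channels,
                             int num_frames) {}
    };
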
virtual int OnDataAvailable(const int voe_channels[], int number_of_voe_channels, const int16_t* audio_data, @@ -98,6 +98,16 @@ public: bool key_pressed, bool need_audio_processing) { return 0; } + // Method to pass the captured audio data to the specific VoE channel. + // |voe_channel| is the id of the VoE channel which is the sink to the + // capture data. + // TODO(xians): Make the interface pure virtual after libjingle + // has its implementation. + virtual void OnData(int voe_channel, const void* audio_data, + int bits_per_sample, int sample_rate, + int number_of_channels, + int number_of_frames) {} + protected: virtual ~AudioTransport() {} }; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/include/fake_audio_device.h b/media/webrtc/trunk/webrtc/modules/audio_device/include/fake_audio_device.h index 7966716b974f..0248317550dd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/include/fake_audio_device.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/include/fake_audio_device.h @@ -15,7 +15,7 @@ namespace webrtc { class FakeAudioDeviceModule : public AudioDeviceModule { public: FakeAudioDeviceModule() {} - ~FakeAudioDeviceModule() {} + virtual ~FakeAudioDeviceModule() {} virtual int32_t AddRef() { return 0; } virtual int32_t Release() { return 0; } virtual int32_t RegisterEventObserver(AudioDeviceObserver* eventCallback) { @@ -48,283 +48,112 @@ class FakeAudioDeviceModule : public AudioDeviceModule { virtual int32_t Process() { return 0; } virtual int32_t Terminate() { return 0; } - virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const { - assert(false); - return 0; - } - virtual ErrorCode LastError() const { - assert(false); - return kAdmErrNone; - } - virtual bool Initialized() const { - assert(false); - return true; - } - virtual int16_t PlayoutDevices() { - assert(false); - return 0; - } - virtual int16_t RecordingDevices() { - assert(false); - return 0; - } + virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const { return 0; } + virtual ErrorCode LastError() const { return kAdmErrNone; } + virtual bool Initialized() const { return true; } + virtual int16_t PlayoutDevices() { return 0; } + virtual int16_t RecordingDevices() { return 0; } virtual int32_t PlayoutDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) { - assert(false); return 0; } virtual int32_t RecordingDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) { - assert(false); return 0; } - virtual int32_t PlayoutIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t InitPlayout() { - assert(false); - return 0; - } - virtual bool PlayoutIsInitialized() const { - assert(false); - return true; - } - virtual int32_t RecordingIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t InitRecording() { - assert(false); - return 0; - } - virtual bool RecordingIsInitialized() const { - assert(false); - return true; - } - virtual int32_t StartPlayout() { - assert(false); - return 0; - } - virtual bool Playing() const { - assert(false); - return false; - } - virtual int32_t StartRecording() { - assert(false); - return 0; - } - virtual bool Recording() const { - assert(false); - return false; - } - virtual bool AGC() const { - assert(false); - return true; - } + virtual int32_t PlayoutIsAvailable(bool* available) { return 0; } + virtual int32_t InitPlayout() { return 0; } + virtual bool PlayoutIsInitialized() const { return true; } + virtual int32_t 
RecordingIsAvailable(bool* available) { return 0; } + virtual int32_t InitRecording() { return 0; } + virtual bool RecordingIsInitialized() const { return true; } + virtual int32_t StartPlayout() { return 0; } + virtual bool Playing() const { return false; } + virtual int32_t StartRecording() { return 0; } + virtual bool Recording() const { return false; } + virtual bool AGC() const { return true; } virtual int32_t SetWaveOutVolume(uint16_t volumeLeft, uint16_t volumeRight) { - assert(false); return 0; } virtual int32_t WaveOutVolume(uint16_t* volumeLeft, uint16_t* volumeRight) const { - assert(false); - return 0; - } - virtual bool SpeakerIsInitialized() const { - assert(false); - return true; - } - virtual bool MicrophoneIsInitialized() const { - assert(false); - return true; - } - virtual int32_t SpeakerVolumeIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t SetSpeakerVolume(uint32_t volume) { - assert(false); - return 0; - } - virtual int32_t SpeakerVolume(uint32_t* volume) const { - assert(false); - return 0; - } - virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const { - assert(false); - return 0; - } - virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const { - assert(false); - return 0; - } - virtual int32_t SpeakerVolumeStepSize(uint16_t* stepSize) const { - assert(false); - return 0; - } - virtual int32_t MicrophoneVolumeIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t SetMicrophoneVolume(uint32_t volume) { - assert(false); - return 0; - } - virtual int32_t MicrophoneVolume(uint32_t* volume) const { - assert(false); - return 0; - } - virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const { - assert(false); - return 0; - } - virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const { - assert(false); return 0; } + virtual bool SpeakerIsInitialized() const { return true; } + virtual bool MicrophoneIsInitialized() const { return true; } + virtual int32_t SpeakerVolumeIsAvailable(bool* available) { return 0; } + virtual int32_t SetSpeakerVolume(uint32_t volume) { return 0; } + virtual int32_t SpeakerVolume(uint32_t* volume) const { return 0; } + virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const { return 0; } + virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const { return 0; } + virtual int32_t SpeakerVolumeStepSize(uint16_t* stepSize) const { return 0; } + virtual int32_t MicrophoneVolumeIsAvailable(bool* available) { return 0; } + virtual int32_t SetMicrophoneVolume(uint32_t volume) { return 0; } + virtual int32_t MicrophoneVolume(uint32_t* volume) const { return 0; } + virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const { return 0; } + virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const { return 0; } virtual int32_t MicrophoneVolumeStepSize(uint16_t* stepSize) const { - assert(false); - return 0; - } - virtual int32_t SpeakerMuteIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t SetSpeakerMute(bool enable) { - assert(false); - return 0; - } - virtual int32_t SpeakerMute(bool* enabled) const { - assert(false); - return 0; - } - virtual int32_t MicrophoneMuteIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t SetMicrophoneMute(bool enable) { - assert(false); - return 0; - } - virtual int32_t MicrophoneMute(bool* enabled) const { - assert(false); - return 0; - } - virtual int32_t MicrophoneBoostIsAvailable(bool* available) { - assert(false); - return 0; - } - virtual int32_t SetMicrophoneBoost(bool 
enable) { - assert(false); - return 0; - } - virtual int32_t MicrophoneBoost(bool* enabled) const { - assert(false); return 0; } + virtual int32_t SpeakerMuteIsAvailable(bool* available) { return 0; } + virtual int32_t SetSpeakerMute(bool enable) { return 0; } + virtual int32_t SpeakerMute(bool* enabled) const { return 0; } + virtual int32_t MicrophoneMuteIsAvailable(bool* available) { return 0; } + virtual int32_t SetMicrophoneMute(bool enable) { return 0; } + virtual int32_t MicrophoneMute(bool* enabled) const { return 0; } + virtual int32_t MicrophoneBoostIsAvailable(bool* available) { return 0; } + virtual int32_t SetMicrophoneBoost(bool enable) { return 0; } + virtual int32_t MicrophoneBoost(bool* enabled) const { return 0; } virtual int32_t StereoPlayoutIsAvailable(bool* available) const { *available = false; return 0; } - virtual int32_t StereoPlayout(bool* enabled) const { - assert(false); - return 0; - } + virtual int32_t StereoPlayout(bool* enabled) const { return 0; } virtual int32_t StereoRecordingIsAvailable(bool* available) const { *available = false; return 0; } - virtual int32_t StereoRecording(bool* enabled) const { - assert(false); - return 0; - } - virtual int32_t SetRecordingChannel(const ChannelType channel) { - assert(false); - return 0; - } - virtual int32_t RecordingChannel(ChannelType* channel) const { - assert(false); - return 0; - } + virtual int32_t StereoRecording(bool* enabled) const { return 0; } + virtual int32_t SetRecordingChannel(const ChannelType channel) { return 0; } + virtual int32_t RecordingChannel(ChannelType* channel) const { return 0; } virtual int32_t SetPlayoutBuffer(const BufferType type, uint16_t sizeMS = 0) { - assert(false); return 0; } virtual int32_t PlayoutBuffer(BufferType* type, uint16_t* sizeMS) const { - assert(false); - return 0; - } - virtual int32_t PlayoutDelay(uint16_t* delayMS) const { - assert(false); - return 0; - } - virtual int32_t RecordingDelay(uint16_t* delayMS) const { - assert(false); - return 0; - } - virtual int32_t CPULoad(uint16_t* load) const { - assert(false); return 0; } + virtual int32_t PlayoutDelay(uint16_t* delayMS) const { return 0; } + virtual int32_t RecordingDelay(uint16_t* delayMS) const { return 0; } + virtual int32_t CPULoad(uint16_t* load) const { return 0; } virtual int32_t StartRawOutputFileRecording( const char pcmFileNameUTF8[kAdmMaxFileNameSize]) { - assert(false); - return 0; - } - virtual int32_t StopRawOutputFileRecording() { - assert(false); return 0; } + virtual int32_t StopRawOutputFileRecording() { return 0; } virtual int32_t StartRawInputFileRecording( const char pcmFileNameUTF8[kAdmMaxFileNameSize]) { - assert(false); - return 0; - } - virtual int32_t StopRawInputFileRecording() { - assert(false); return 0; } + virtual int32_t StopRawInputFileRecording() { return 0; } virtual int32_t SetRecordingSampleRate(const uint32_t samplesPerSec) { - assert(false); return 0; } virtual int32_t RecordingSampleRate(uint32_t* samplesPerSec) const { - assert(false); return 0; } virtual int32_t SetPlayoutSampleRate(const uint32_t samplesPerSec) { - assert(false); return 0; } - virtual int32_t PlayoutSampleRate(uint32_t* samplesPerSec) const { - assert(false); - return 0; - } - virtual int32_t ResetAudioDevice() { - assert(false); - return 0; - } - virtual int32_t SetLoudspeakerStatus(bool enable) { - assert(false); - return 0; - } - virtual int32_t GetLoudspeakerStatus(bool* enabled) const { - assert(false); - return 0; - } - virtual int32_t EnableBuiltInAEC(bool enable) { - assert(false); - return -1; 
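Throughout this file the diff replaces assert(false) stub bodies with benign returns, turning FakeAudioDeviceModule from a trap into a usable null object: tests that merely hold the module no longer abort when an "unexpected" method runs. A minimal illustration of the two styles over a hypothetical Module interface:

    #include <cassert>

    class Module {
     public:
      virtual ~Module() {}
      virtual int Start() = 0;
    };

    // Before: a fake that aborts the test the moment an unexpected
    // method is reached.
    class TrappingFake : public Module {
     public:
      virtual int Start() { assert(false); return 0; }
    };

    // After: a null object that succeeds quietly, so tests exercising
    // unrelated code paths can hold a Module without special-casing it.
    class NullFake : public Module {
     public:
      virtual int Start() { return 0; }
    };
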
- } - virtual bool BuiltInAECIsEnabled() const { - assert(false); - return false; - } + virtual int32_t PlayoutSampleRate(uint32_t* samplesPerSec) const { return 0; } + virtual int32_t ResetAudioDevice() { return 0; } + virtual int32_t SetLoudspeakerStatus(bool enable) { return 0; } + virtual int32_t GetLoudspeakerStatus(bool* enabled) const { return 0; } + virtual int32_t EnableBuiltInAEC(bool enable) { return -1; } + virtual bool BuiltInAECIsEnabled() const { return false; } }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc index 6f9e35f34847..facc3714c952 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc @@ -62,7 +62,6 @@ static const unsigned int ALSA_PLAYOUT_LATENCY = 40*1000; // in us static const unsigned int ALSA_CAPTURE_FREQ = 48000; static const unsigned int ALSA_CAPTURE_CH = 2; static const unsigned int ALSA_CAPTURE_LATENCY = 40*1000; // in us -static const unsigned int ALSA_PLAYOUT_WAIT_TIMEOUT = 5; // in ms static const unsigned int ALSA_CAPTURE_WAIT_TIMEOUT = 5; // in ms #define FUNC_GET_NUM_OF_DEVICE 0 @@ -130,7 +129,7 @@ AudioDeviceLinuxALSA::~AudioDeviceLinuxALSA() { WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__); - + Terminate(); // Clean up the recording buffer and playout buffer. @@ -427,7 +426,7 @@ int32_t AudioDeviceLinuxALSA::SpeakerVolume(uint32_t& volume) const } volume = level; - + return 0; } @@ -463,7 +462,7 @@ int32_t AudioDeviceLinuxALSA::MaxSpeakerVolume( } maxVolume = maxVol; - + return 0; } @@ -479,7 +478,7 @@ int32_t AudioDeviceLinuxALSA::MinSpeakerVolume( } minVolume = minVol; - + return 0; } @@ -487,8 +486,8 @@ int32_t AudioDeviceLinuxALSA::SpeakerVolumeStepSize( uint16_t& stepSize) const { - uint16_t delta(0); - + uint16_t delta(0); + if (_mixerManager.SpeakerVolumeStepSize(delta) == -1) { return -1; @@ -539,15 +538,15 @@ int32_t AudioDeviceLinuxALSA::SetSpeakerMute(bool enable) int32_t AudioDeviceLinuxALSA::SpeakerMute(bool& enabled) const { - bool muted(0); - + bool muted(0); + if (_mixerManager.SpeakerMute(muted) == -1) { return -1; } enabled = muted; - + return 0; } @@ -596,8 +595,8 @@ int32_t AudioDeviceLinuxALSA::SetMicrophoneMute(bool enable) int32_t AudioDeviceLinuxALSA::MicrophoneMute(bool& enabled) const { - bool muted(0); - + bool muted(0); + if (_mixerManager.MicrophoneMute(muted) == -1) { return -1; @@ -609,7 +608,7 @@ int32_t AudioDeviceLinuxALSA::MicrophoneMute(bool& enabled) const int32_t AudioDeviceLinuxALSA::MicrophoneBoostIsAvailable(bool& available) { - + bool isAvailable(false); bool wasInitialized = _mixerManager.MicrophoneIsInitialized(); @@ -647,15 +646,15 @@ int32_t AudioDeviceLinuxALSA::SetMicrophoneBoost(bool enable) int32_t AudioDeviceLinuxALSA::MicrophoneBoost(bool& enabled) const { - bool onOff(0); - + bool onOff(0); + if (_mixerManager.MicrophoneBoost(onOff) == -1) { return -1; } enabled = onOff; - + return 0; } @@ -677,7 +676,7 @@ int32_t AudioDeviceLinuxALSA::StereoRecordingIsAvailable(bool& available) int recChannels = _recChannels; available = false; - + // Stop/uninitialize recording if initialized (and possibly started) if (_recIsInitialized) { @@ -748,7 +747,7 @@ int32_t AudioDeviceLinuxALSA::StereoPlayoutIsAvailable(bool& available) int playChannels = _playChannels; available = false; - + // 
Stop/uninitialize recording if initialized (and possibly started) if (_playIsInitialized) { @@ -847,7 +846,7 @@ int32_t AudioDeviceLinuxALSA::SetMicrophoneVolume(uint32_t volume) { return (_mixerManager.SetMicrophoneVolume(volume)); - + return 0; } @@ -864,7 +863,7 @@ int32_t AudioDeviceLinuxALSA::MicrophoneVolume(uint32_t& volume) const } volume = level; - + return 0; } @@ -904,8 +903,8 @@ int32_t AudioDeviceLinuxALSA::MicrophoneVolumeStepSize( uint16_t& stepSize) const { - uint16_t delta(0); - + uint16_t delta(0); + if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1) { return -1; @@ -997,7 +996,7 @@ int32_t AudioDeviceLinuxALSA::RecordingDeviceName( { memset(guid, 0, kAdmMaxGuidSize); } - + return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize, guid, kAdmMaxGuidSize); } @@ -1047,7 +1046,7 @@ int32_t AudioDeviceLinuxALSA::SetRecordingDevice( int32_t AudioDeviceLinuxALSA::PlayoutIsAvailable(bool& available) { - + available = false; // Try to initialize the playout side with mono @@ -1072,13 +1071,13 @@ int32_t AudioDeviceLinuxALSA::PlayoutIsAvailable(bool& available) _playChannels = 2; } } - + return res; } int32_t AudioDeviceLinuxALSA::RecordingIsAvailable(bool& available) { - + available = false; // Try to initialize the recording side with mono @@ -1103,7 +1102,7 @@ int32_t AudioDeviceLinuxALSA::RecordingIsAvailable(bool& available) _recChannels = 2; } } - + return res; } @@ -1191,7 +1190,7 @@ int32_t AudioDeviceLinuxALSA::InitPlayout() _playoutFramesIn10MS = _playoutFreq/100; if ((errVal = LATE(snd_pcm_set_params)( _handlePlayout, -#if defined(WEBRTC_BIG_ENDIAN) +#if defined(WEBRTC_ARCH_BIG_ENDIAN) SND_PCM_FORMAT_S16_BE, #else SND_PCM_FORMAT_S16_LE, //format @@ -1347,7 +1346,7 @@ int32_t AudioDeviceLinuxALSA::InitRecording() _recordingFramesIn10MS = _recordingFreq/100; if ((errVal = LATE(snd_pcm_set_params)(_handleRecord, -#if defined(WEBRTC_BIG_ENDIAN) +#if defined(WEBRTC_ARCH_BIG_ENDIAN) SND_PCM_FORMAT_S16_BE, //format #else SND_PCM_FORMAT_S16_LE, //format @@ -1366,7 +1365,7 @@ int32_t AudioDeviceLinuxALSA::InitRecording() _recChannels = 1; if ((errVal = LATE(snd_pcm_set_params)(_handleRecord, -#if defined(WEBRTC_BIG_ENDIAN) +#if defined(WEBRTC_ARCH_BIG_ENDIAN) SND_PCM_FORMAT_S16_BE, //format #else SND_PCM_FORMAT_S16_LE, //format @@ -1777,11 +1776,11 @@ int32_t AudioDeviceLinuxALSA::PlayoutBuffer( type = _playBufType; if (type == AudioDeviceModule::kFixedBufferSize) { - sizeMS = _playBufDelayFixed; + sizeMS = _playBufDelayFixed; } else { - sizeMS = _playBufDelay; + sizeMS = _playBufDelay; } return 0; @@ -1797,41 +1796,49 @@ int32_t AudioDeviceLinuxALSA::CPULoad(uint16_t& load) const bool AudioDeviceLinuxALSA::PlayoutWarning() const { + CriticalSectionScoped lock(&_critSect); return (_playWarning > 0); } bool AudioDeviceLinuxALSA::PlayoutError() const { + CriticalSectionScoped lock(&_critSect); return (_playError > 0); } bool AudioDeviceLinuxALSA::RecordingWarning() const { + CriticalSectionScoped lock(&_critSect); return (_recWarning > 0); } bool AudioDeviceLinuxALSA::RecordingError() const { + CriticalSectionScoped lock(&_critSect); return (_recError > 0); } void AudioDeviceLinuxALSA::ClearPlayoutWarning() { + CriticalSectionScoped lock(&_critSect); _playWarning = 0; } void AudioDeviceLinuxALSA::ClearPlayoutError() { + CriticalSectionScoped lock(&_critSect); _playError = 0; } void AudioDeviceLinuxALSA::ClearRecordingWarning() { + CriticalSectionScoped lock(&_critSect); _recWarning = 0; } void AudioDeviceLinuxALSA::ClearRecordingError() { + CriticalSectionScoped 
lock(&_critSect); _recError = 0; } @@ -1848,7 +1855,7 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo( char* enumDeviceId, const int32_t ediLen) const { - + // Device enumeration based on libjingle implementation // by Tristan Schmelcher at Google Inc. diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h index d4e430451822..6a9176d2089e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h @@ -178,8 +178,8 @@ private: bool KeyPressed() const; private: - void Lock() { _critSect.Enter(); }; - void UnLock() { _critSect.Leave(); }; + void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) { _critSect.Enter(); }; + void UnLock() UNLOCK_FUNCTION(_critSect) { _critSect.Leave(); }; private: inline int32_t InputSanityCheckAfterUnlockedPeriod() const; inline int32_t OutputSanityCheckAfterUnlockedPeriod() const; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc index 370635854cfb..43c74b4ac9c6 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc @@ -33,35 +33,6 @@ namespace webrtc // Static Methods // ============================================================================ -bool AudioDeviceLinuxPulse::PulseAudioIsSupported() -{ - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%s", - __FUNCTION__); - - bool pulseAudioIsSupported(true); - - // Check that we can initialize - AudioDeviceLinuxPulse* admPulse = new AudioDeviceLinuxPulse(-1); - if (admPulse->InitPulseAudio() == -1) - { - pulseAudioIsSupported = false; - } - admPulse->TerminatePulseAudio(); - delete admPulse; - - if (pulseAudioIsSupported) - { - WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, - "*** Linux Pulse Audio is supported ***"); - } else - { - WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, - "*** Linux Pulse Audio is NOT supported => will revert to the ALSA API ***"); - } - - return (pulseAudioIsSupported); -} - AudioDeviceLinuxPulse::AudioDeviceLinuxPulse(const int32_t id) : _ptrAudioBuffer(NULL), _critSect(*CriticalSectionWrapper::CreateCriticalSection()), @@ -2646,7 +2617,7 @@ int32_t AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData, int32_t AudioDeviceLinuxPulse::ProcessRecordedData( int8_t *bufferData, uint32_t bufferSizeInSamples, - uint32_t recDelay) + uint32_t recDelay) EXCLUSIVE_LOCKS_REQUIRED(_critSect) { uint32_t currentMicLevel(0); uint32_t newMicLevel(0); diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h index 26d711d18ce9..4804c7214258 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h @@ -97,8 +97,6 @@ public: AudioDeviceLinuxPulse(const int32_t id); virtual ~AudioDeviceLinuxPulse(); - static bool PulseAudioIsSupported(); - // Retrieve the currently utilized audio layer virtual int32_t ActiveAudioLayer( AudioDeviceModule::AudioLayer& audioLayer) const OVERRIDE; @@ -228,16 +226,12 @@ public: virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) 
OVERRIDE; private: - void Lock() - { + void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) { _critSect.Enter(); } - ; - void UnLock() - { + void UnLock() UNLOCK_FUNCTION(_critSect) { _critSect.Leave(); } - ; void WaitForOperationCompletion(pa_operation* paOperation) const; void WaitForSuccess(pa_operation* paOperation) const; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_device_mac.cc b/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_device_mac.cc index 9da188013ca5..b07c94dd11c0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_device_mac.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_device_mac.cc @@ -97,7 +97,7 @@ void AudioDeviceMac::logCAMsg(const TraceLevel level, assert(msg != NULL); assert(err != NULL); -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err); #else // We need to flip the characters in this case. @@ -1457,7 +1457,7 @@ int32_t AudioDeviceMac::InitPlayout() _outDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian; #endif _outDesiredFormat.mFormatID = kAudioFormatLinearPCM; @@ -1681,7 +1681,7 @@ int32_t AudioDeviceMac::InitRecording() _inDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian; #endif _inDesiredFormat.mFormatID = kAudioFormatLinearPCM; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc b/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc index 08e419750ea5..952dc11d8b91 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc @@ -1154,7 +1154,7 @@ void AudioMixerManagerMac::logCAMsg(const TraceLevel level, assert(msg != NULL); assert(err != NULL); -#ifdef WEBRTC_BIG_ENDIAN +#ifdef WEBRTC_ARCH_BIG_ENDIAN WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err); #else // We need to flip the characters in this case. diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.cc deleted file mode 100644 index 10168c2a1171..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.cc +++ /dev/null @@ -1,5 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this file, - * You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#include "../android/audio_manager_jni.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.cc b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.cc deleted file mode 100644 index c1f56f199d60..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.cc +++ /dev/null @@ -1,5 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this file, - * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -#include "../android/audio_device_jni_android.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc b/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc index f1dc86a1b0e4..f15fc3780a8e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc @@ -142,6 +142,10 @@ class AudioTransportAPI: public AudioTransport { return 0; } + virtual void OnData(int voe_channel, const void* audio_data, + int bits_per_sample, int sample_rate, + int number_of_channels, + int number_of_frames) {} private: uint32_t rec_count_; uint32_t play_count_; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc b/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc index 8bc462d3c13c..a9c88cb0e9c7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc @@ -37,15 +37,6 @@ const char* RecordedMicrophoneBoostFile = const char* RecordedMicrophoneAGCFile = "recorded_microphone_AGC_mono_48.pcm"; const char* RecordedSpeakerFile = "recorded_speaker_48.pcm"; -struct AudioPacket -{ - uint8_t dataBuffer[4 * 960]; - uint16_t nSamples; - uint16_t nBytesPerSample; - uint8_t nChannels; - uint32_t samplesPerSec; -}; - // Helper functions #if !defined(WEBRTC_IOS) char* GetFilename(char* filename) @@ -103,8 +94,7 @@ AudioTransportImpl::AudioTransportImpl(AudioDeviceModule* audioDevice) : _loopBackMeasurements(false), _playFile(*FileWrapper::Create()), _recCount(0), - _playCount(0), - _audioList() + _playCount(0) { _resampler.Reset(48000, 48000, kResamplerSynchronousStereo); } @@ -115,18 +105,9 @@ AudioTransportImpl::~AudioTransportImpl() _playFile.CloseFile(); delete &_playFile; - while (!_audioList.Empty()) - { - ListItem* item = _audioList.First(); - if (item) - { - AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem()); - if (packet) - { - delete packet; - } - } - _audioList.PopFront(); + for (AudioPacketList::iterator iter = _audioList.begin(); + iter != _audioList.end(); ++iter) { + delete *iter; } } @@ -152,19 +133,11 @@ void AudioTransportImpl::SetFullDuplex(bool enable) { _fullDuplex = enable; - while (!_audioList.Empty()) - { - ListItem* item = _audioList.First(); - if (item) - { - AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem()); - if (packet) - { - delete packet; - } - } - _audioList.PopFront(); + for (AudioPacketList::iterator iter = _audioList.begin(); + iter != _audioList.end(); ++iter) { + delete *iter; } + _audioList.clear(); } int32_t AudioTransportImpl::RecordedDataIsAvailable( @@ -179,7 +152,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( const bool keyPressed, uint32_t& newMicLevel) { - if (_fullDuplex &&
_audioList.size() < 15) { AudioPacket* packet = new AudioPacket(); memcpy(packet->dataBuffer, audioSamples, nSamples * nBytesPerSample); @@ -187,7 +160,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( packet->nBytesPerSample = nBytesPerSample; packet->nChannels = nChannels; packet->samplesPerSec = samplesPerSec; - _audioList.PushBack(packet); + _audioList.push_back(packet); } _recCount++; @@ -323,14 +296,14 @@ int32_t AudioTransportImpl::NeedMorePlayData( { if (_fullDuplex) { - if (_audioList.Empty()) + if (_audioList.empty()) { // use zero stuffing when not enough data memset(audioSamples, 0, nBytesPerSample * nSamples); } else { - ListItem* item = _audioList.First(); - AudioPacket* packet = static_cast (item->GetItem()); + AudioPacket* packet = _audioList.front(); + _audioList.pop_front(); if (packet) { int ret(0); @@ -435,7 +408,6 @@ int32_t AudioTransportImpl::NeedMorePlayData( nSamplesOut = nSamples; delete packet; } - _audioList.PopFront(); } } // if (_fullDuplex) @@ -525,12 +497,12 @@ int32_t AudioTransportImpl::NeedMorePlayData( { uint16_t recDelayMS(0); uint16_t playDelayMS(0); - uint32_t nItemsInList(0); + size_t nItemsInList(0); - nItemsInList = _audioList.GetSize(); + nItemsInList = _audioList.size(); EXPECT_EQ(0, _audioDevice->RecordingDelay(&recDelayMS)); EXPECT_EQ(0, _audioDevice->PlayoutDelay(&playDelayMS)); - TEST_LOG("Delay (rec+play)+buf: %3u (%3u+%3u)+%3u [ms]\n", + TEST_LOG("Delay (rec+play)+buf: %3zu (%3u+%3u)+%3zu [ms]\n", recDelayMS + playDelayMS + 10 * (nItemsInList + 1), recDelayMS, playDelayMS, 10 * (nItemsInList + 1)); @@ -564,6 +536,12 @@ int AudioTransportImpl::OnDataAvailable(const int voe_channels[], return 0; } +void AudioTransportImpl::OnData(int voe_channel, + const void* audio_data, + int bits_per_sample, int sample_rate, + int number_of_channels, + int number_of_frames) {} + FuncTestManager::FuncTestManager() : _processThread(NULL), _audioDevice(NULL), diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.h b/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.h index f8bacb22a763..6e21466e6763 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.h @@ -13,13 +13,13 @@ #include "webrtc/modules/audio_device/audio_device_utility.h" +#include #include #include "webrtc/common_audio/resampler/include/resampler.h" #include "webrtc/modules/audio_device/include/audio_device.h" #include "webrtc/modules/audio_device/test/audio_device_test_defines.h" #include "webrtc/system_wrappers/interface/file_wrapper.h" -#include "webrtc/system_wrappers/interface/list_wrapper.h" #include "webrtc/typedefs.h" #if defined(WEBRTC_IOS) || defined(ANDROID) @@ -60,6 +60,15 @@ enum TestType TTTest = 66, }; +struct AudioPacket +{ + uint8_t dataBuffer[4 * 960]; + uint16_t nSamples; + uint16_t nBytesPerSample; + uint8_t nChannels; + uint32_t samplesPerSec; +}; + class ProcessThread; namespace webrtc @@ -122,6 +131,11 @@ public: bool key_pressed, bool need_audio_processing); + virtual void OnData(int voe_channel, const void* audio_data, + int bits_per_sample, int sample_rate, + int number_of_channels, + int number_of_frames); + AudioTransportImpl(AudioDeviceModule* audioDevice); ~AudioTransportImpl(); @@ -165,6 +179,7 @@ public: ; private: + typedef std::list AudioPacketList; AudioDeviceModule* _audioDevice; bool _playFromFile; @@ -181,8 +196,7 @@ private: uint32_t _recCount; uint32_t _playCount; - - ListWrapper _audioList; + 
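// The hunks above swap WebRTC's home-grown ListWrapper for a
// std::list<AudioPacket*>. Because the list owns raw pointers, every exit path
// must delete what it pops. A condensed sketch of the resulting idiom
// (pre-C++11 style, as in the patch; the struct layout and the 15-packet cap
// come from the hunks above, the helper names are hypothetical):

#include <cstddef>
#include <cstdint>
#include <list>

struct AudioPacket {  // as moved into func_test_manager.h
  uint8_t dataBuffer[4 * 960];
  uint16_t nSamples;
  uint16_t nBytesPerSample;
  uint8_t nChannels;
  uint32_t samplesPerSec;
};
typedef std::list<AudioPacket*> AudioPacketList;

// Producer side (RecordedDataIsAvailable): bound the queue, then append.
void Produce(AudioPacketList& audio_list) {
  if (audio_list.size() < 15) {
    audio_list.push_back(new AudioPacket());
  }
}

// Consumer side (NeedMorePlayData): pop from the front; the caller deletes
// after resampling, or zero-stuffs the output when the list is empty.
AudioPacket* Consume(AudioPacketList& audio_list) {
  if (audio_list.empty()) {
    return NULL;
  }
  AudioPacket* packet = audio_list.front();
  audio_list.pop_front();
  return packet;
}

// Teardown (destructor / SetFullDuplex): delete every element, then clear.
void Clear(AudioPacketList& audio_list) {
  for (AudioPacketList::iterator iter = audio_list.begin();
       iter != audio_list.end(); ++iter) {
    delete *iter;
  }
  audio_list.clear();
}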
AudioPacketList _audioList; Resampler _resampler; }; diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c index 40e9f67d0dd2..7dda551f3d85 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c @@ -31,16 +31,14 @@ // Buffer size (samples) static const size_t kBufSizePartitions = 250; // 1 second of audio in 16 kHz. -// Noise suppression -static const int converged = 250; - // Metrics static const int subCountLen = 4; static const int countLen = 50; // Quantities to control H band scaling for SWB input -static const int flagHbandCn = 1; // flag for adding comfort noise in H band -static const float cnScaleHband = (float)0.4; // scale for comfort noise in H band +static const int flagHbandCn = 1; // flag for adding comfort noise in H band +static const float cnScaleHband = + (float)0.4; // scale for comfort noise in H band // Initial bin for averaging nlp gain in low band static const int freqAvgIc = PART_LEN / 2; @@ -48,78 +46,68 @@ static const int freqAvgIc = PART_LEN / 2; // win = sqrt(hanning(63)); win = [0 ; win(1:32)]; // fprintf(1, '\t%.14f, %.14f, %.14f,\n', win); static const float sqrtHanning[65] = { - 0.00000000000000f, 0.02454122852291f, 0.04906767432742f, - 0.07356456359967f, 0.09801714032956f, 0.12241067519922f, - 0.14673047445536f, 0.17096188876030f, 0.19509032201613f, - 0.21910124015687f, 0.24298017990326f, 0.26671275747490f, - 0.29028467725446f, 0.31368174039889f, 0.33688985339222f, - 0.35989503653499f, 0.38268343236509f, 0.40524131400499f, - 0.42755509343028f, 0.44961132965461f, 0.47139673682600f, - 0.49289819222978f, 0.51410274419322f, 0.53499761988710f, - 0.55557023301960f, 0.57580819141785f, 0.59569930449243f, - 0.61523159058063f, 0.63439328416365f, 0.65317284295378f, - 0.67155895484702f, 0.68954054473707f, 0.70710678118655f, - 0.72424708295147f, 0.74095112535496f, 0.75720884650648f, - 0.77301045336274f, 0.78834642762661f, 0.80320753148064f, - 0.81758481315158f, 0.83146961230255f, 0.84485356524971f, - 0.85772861000027f, 0.87008699110871f, 0.88192126434835f, - 0.89322430119552f, 0.90398929312344f, 0.91420975570353f, - 0.92387953251129f, 0.93299279883474f, 0.94154406518302f, - 0.94952818059304f, 0.95694033573221f, 0.96377606579544f, - 0.97003125319454f, 0.97570213003853f, 0.98078528040323f, - 0.98527764238894f, 0.98917650996478f, 0.99247953459871f, - 0.99518472667220f, 0.99729045667869f, 0.99879545620517f, - 0.99969881869620f, 1.00000000000000f -}; + 0.00000000000000f, 0.02454122852291f, 0.04906767432742f, 0.07356456359967f, + 0.09801714032956f, 0.12241067519922f, 0.14673047445536f, 0.17096188876030f, + 0.19509032201613f, 0.21910124015687f, 0.24298017990326f, 0.26671275747490f, + 0.29028467725446f, 0.31368174039889f, 0.33688985339222f, 0.35989503653499f, + 0.38268343236509f, 0.40524131400499f, 0.42755509343028f, 0.44961132965461f, + 0.47139673682600f, 0.49289819222978f, 0.51410274419322f, 0.53499761988710f, + 0.55557023301960f, 0.57580819141785f, 0.59569930449243f, 0.61523159058063f, + 0.63439328416365f, 0.65317284295378f, 0.67155895484702f, 0.68954054473707f, + 0.70710678118655f, 0.72424708295147f, 0.74095112535496f, 0.75720884650648f, + 0.77301045336274f, 0.78834642762661f, 0.80320753148064f, 0.81758481315158f, + 0.83146961230255f, 0.84485356524971f, 0.85772861000027f, 0.87008699110871f, + 0.88192126434835f, 0.89322430119552f, 0.90398929312344f, 0.91420975570353f, + 
0.92387953251129f, 0.93299279883474f, 0.94154406518302f, 0.94952818059304f, + 0.95694033573221f, 0.96377606579544f, 0.97003125319454f, 0.97570213003853f, + 0.98078528040323f, 0.98527764238894f, 0.98917650996478f, 0.99247953459871f, + 0.99518472667220f, 0.99729045667869f, 0.99879545620517f, 0.99969881869620f, + 1.00000000000000f}; // Matlab code to produce table: // weightCurve = [0 ; 0.3 * sqrt(linspace(0,1,64))' + 0.1]; // fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', weightCurve); const float WebRtcAec_weightCurve[65] = { - 0.0000f, 0.1000f, 0.1378f, 0.1535f, 0.1655f, 0.1756f, - 0.1845f, 0.1926f, 0.2000f, 0.2069f, 0.2134f, 0.2195f, - 0.2254f, 0.2309f, 0.2363f, 0.2414f, 0.2464f, 0.2512f, - 0.2558f, 0.2604f, 0.2648f, 0.2690f, 0.2732f, 0.2773f, - 0.2813f, 0.2852f, 0.2890f, 0.2927f, 0.2964f, 0.3000f, - 0.3035f, 0.3070f, 0.3104f, 0.3138f, 0.3171f, 0.3204f, - 0.3236f, 0.3268f, 0.3299f, 0.3330f, 0.3360f, 0.3390f, - 0.3420f, 0.3449f, 0.3478f, 0.3507f, 0.3535f, 0.3563f, - 0.3591f, 0.3619f, 0.3646f, 0.3673f, 0.3699f, 0.3726f, - 0.3752f, 0.3777f, 0.3803f, 0.3828f, 0.3854f, 0.3878f, - 0.3903f, 0.3928f, 0.3952f, 0.3976f, 0.4000f -}; + 0.0000f, 0.1000f, 0.1378f, 0.1535f, 0.1655f, 0.1756f, 0.1845f, 0.1926f, + 0.2000f, 0.2069f, 0.2134f, 0.2195f, 0.2254f, 0.2309f, 0.2363f, 0.2414f, + 0.2464f, 0.2512f, 0.2558f, 0.2604f, 0.2648f, 0.2690f, 0.2732f, 0.2773f, + 0.2813f, 0.2852f, 0.2890f, 0.2927f, 0.2964f, 0.3000f, 0.3035f, 0.3070f, + 0.3104f, 0.3138f, 0.3171f, 0.3204f, 0.3236f, 0.3268f, 0.3299f, 0.3330f, + 0.3360f, 0.3390f, 0.3420f, 0.3449f, 0.3478f, 0.3507f, 0.3535f, 0.3563f, + 0.3591f, 0.3619f, 0.3646f, 0.3673f, 0.3699f, 0.3726f, 0.3752f, 0.3777f, + 0.3803f, 0.3828f, 0.3854f, 0.3878f, 0.3903f, 0.3928f, 0.3952f, 0.3976f, + 0.4000f}; // Matlab code to produce table: // overDriveCurve = [sqrt(linspace(0,1,65))' + 1]; // fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', overDriveCurve); const float WebRtcAec_overDriveCurve[65] = { - 1.0000f, 1.1250f, 1.1768f, 1.2165f, 1.2500f, 1.2795f, - 1.3062f, 1.3307f, 1.3536f, 1.3750f, 1.3953f, 1.4146f, - 1.4330f, 1.4507f, 1.4677f, 1.4841f, 1.5000f, 1.5154f, - 1.5303f, 1.5449f, 1.5590f, 1.5728f, 1.5863f, 1.5995f, - 1.6124f, 1.6250f, 1.6374f, 1.6495f, 1.6614f, 1.6731f, - 1.6847f, 1.6960f, 1.7071f, 1.7181f, 1.7289f, 1.7395f, - 1.7500f, 1.7603f, 1.7706f, 1.7806f, 1.7906f, 1.8004f, - 1.8101f, 1.8197f, 1.8292f, 1.8385f, 1.8478f, 1.8570f, - 1.8660f, 1.8750f, 1.8839f, 1.8927f, 1.9014f, 1.9100f, - 1.9186f, 1.9270f, 1.9354f, 1.9437f, 1.9520f, 1.9601f, - 1.9682f, 1.9763f, 1.9843f, 1.9922f, 2.0000f -}; + 1.0000f, 1.1250f, 1.1768f, 1.2165f, 1.2500f, 1.2795f, 1.3062f, 1.3307f, + 1.3536f, 1.3750f, 1.3953f, 1.4146f, 1.4330f, 1.4507f, 1.4677f, 1.4841f, + 1.5000f, 1.5154f, 1.5303f, 1.5449f, 1.5590f, 1.5728f, 1.5863f, 1.5995f, + 1.6124f, 1.6250f, 1.6374f, 1.6495f, 1.6614f, 1.6731f, 1.6847f, 1.6960f, + 1.7071f, 1.7181f, 1.7289f, 1.7395f, 1.7500f, 1.7603f, 1.7706f, 1.7806f, + 1.7906f, 1.8004f, 1.8101f, 1.8197f, 1.8292f, 1.8385f, 1.8478f, 1.8570f, + 1.8660f, 1.8750f, 1.8839f, 1.8927f, 1.9014f, 1.9100f, 1.9186f, 1.9270f, + 1.9354f, 1.9437f, 1.9520f, 1.9601f, 1.9682f, 1.9763f, 1.9843f, 1.9922f, + 2.0000f}; // Target suppression levels for nlp modes. // log{0.001, 0.00001, 0.00000001} -static const float kTargetSupp[3] = { -6.9f, -11.5f, -18.4f }; +static const float kTargetSupp[3] = {-6.9f, -11.5f, -18.4f}; // Two sets of parameters, one for the extended filter mode. 
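// The three 65-entry curves above are generated offline; the Matlab one-liners
// are kept in the comments. Equivalent C that reproduces them, handy for
// checking the reformatted literals -- a verification sketch, not part of the
// patch (BuildCurves is a hypothetical name):

#include <math.h>

static void BuildCurves(float weight[65], float over_drive[65]) {
  int i;
  // weightCurve = [0 ; 0.3 * sqrt(linspace(0,1,64))' + 0.1]
  weight[0] = 0.0f;
  for (i = 1; i < 65; i++) {
    weight[i] = 0.1f + 0.3f * sqrtf((i - 1) / 63.0f);
  }
  // overDriveCurve = [sqrt(linspace(0,1,65))' + 1]
  for (i = 0; i < 65; i++) {
    over_drive[i] = 1.0f + sqrtf(i / 64.0f);
  }
}
// Spot checks against the tables: weight[8] == 0.2000f, weight[64] == 0.4000f,
// over_drive[16] == 1.5000f, over_drive[64] == 2.0000f.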
-static const float kExtendedMinOverDrive[3] = { 3.0f, 6.0f, 15.0f }; -static const float kNormalMinOverDrive[3] = { 1.0f, 2.0f, 5.0f }; -static const float kExtendedSmoothingCoefficients[2][2] = - { { 0.9f, 0.1f }, { 0.92f, 0.08f } }; -static const float kNormalSmoothingCoefficients[2][2] = - { { 0.9f, 0.1f }, { 0.93f, 0.07f } }; +static const float kExtendedMinOverDrive[3] = {3.0f, 6.0f, 15.0f}; +static const float kNormalMinOverDrive[3] = {1.0f, 2.0f, 5.0f}; +static const float kExtendedSmoothingCoefficients[2][2] = {{0.9f, 0.1f}, + {0.92f, 0.08f}}; +static const float kNormalSmoothingCoefficients[2][2] = {{0.9f, 0.1f}, + {0.93f, 0.07f}}; // Number of partitions forming the NLP's "preferred" bands. -enum { kPrefBandSize = 24 }; +enum { + kPrefBandSize = 24 +}; #ifdef WEBRTC_AEC_DEBUG_DUMP extern int webrtc_aec_instance_count; @@ -128,14 +116,16 @@ extern int webrtc_aec_instance_count; // "Private" function prototypes. static void ProcessBlock(AecCore* aec); -static void NonLinearProcessing(AecCore* aec, short *output, short *outputH); +static void NonLinearProcessing(AecCore* aec, short* output, short* outputH); -static void GetHighbandGain(const float *lambda, float *nlpGainHband); +static void GetHighbandGain(const float* lambda, float* nlpGainHband); // Comfort_noise also computes noise for H band returned in comfortNoiseHband -static void ComfortNoise(AecCore* aec, float efw[2][PART_LEN1], - complex_t *comfortNoiseHband, - const float *noisePow, const float *lambda); +static void ComfortNoise(AecCore* aec, + float efw[2][PART_LEN1], + complex_t* comfortNoiseHband, + const float* noisePow, + const float* lambda); static void InitLevel(PowerLevel* level); static void InitStats(Stats* stats); @@ -148,148 +138,137 @@ static void TimeToFrequency(float time_data[PART_LEN2], float freq_data[2][PART_LEN1], int window); -__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) -{ - return aRe * bRe - aIm * bIm; +__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) { + return aRe * bRe - aIm * bIm; } -__inline static float MulIm(float aRe, float aIm, float bRe, float bIm) -{ - return aRe * bIm + aIm * bRe; +__inline static float MulIm(float aRe, float aIm, float bRe, float bIm) { + return aRe * bIm + aIm * bRe; } -static int CmpFloat(const void *a, const void *b) -{ - const float *da = (const float *)a; - const float *db = (const float *)b; +static int CmpFloat(const void* a, const void* b) { + const float* da = (const float*)a; + const float* db = (const float*)b; - return (*da > *db) - (*da < *db); + return (*da > *db) - (*da < *db); } -int WebRtcAec_CreateAec(AecCore** aecInst) -{ - AecCore* aec = malloc(sizeof(AecCore)); - *aecInst = aec; - if (aec == NULL) { - return -1; - } +int WebRtcAec_CreateAec(AecCore** aecInst) { + AecCore* aec = malloc(sizeof(AecCore)); + *aecInst = aec; + if (aec == NULL) { + return -1; + } - aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, - sizeof(int16_t)); - if (!aec->nearFrBuf) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } + aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t)); + if (!aec->nearFrBuf) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } - aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, - sizeof(int16_t)); - if (!aec->outFrBuf) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } + aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t)); + if (!aec->outFrBuf) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } - 
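// MulRe/MulIm above multiply complex numbers kept as split re/im arrays:
//   (aRe + j*aIm)(bRe + j*bIm) = (aRe*bRe - aIm*bIm) + j*(aRe*bIm + aIm*bRe).
// FilterFar applies them directly (X .* W per bin), while FilterAdaptation
// passes -xfBuf[1][...] for the imaginary part, i.e. multiplies by the complex
// conjugate, matching the frequency-domain LMS update W <- W + mu*conj(X)*E.
// A tiny self-check of the identity (values exact in float):

#include <assert.h>

static void CheckComplexMul(void) {
  // (1 + 2j)(3 + 4j) = -5 + 10j
  assert(MulRe(1.0f, 2.0f, 3.0f, 4.0f) == -5.0f);
  assert(MulIm(1.0f, 2.0f, 3.0f, 4.0f) == 10.0f);
  // Negating aIm gives conj(a)*b: (1 - 2j)(3 + 4j) = 11 - 2j
  assert(MulRe(1.0f, -2.0f, 3.0f, 4.0f) == 11.0f);
  assert(MulIm(1.0f, -2.0f, 3.0f, 4.0f) == -2.0f);
}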
aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, - sizeof(int16_t)); - if (!aec->nearFrBufH) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } + aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t)); + if (!aec->nearFrBufH) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } - aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, - sizeof(int16_t)); - if (!aec->outFrBufH) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } + aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t)); + if (!aec->outFrBufH) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } - // Create far-end buffers. - aec->far_buf = WebRtc_CreateBuffer(kBufSizePartitions, - sizeof(float) * 2 * PART_LEN1); - if (!aec->far_buf) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } - aec->far_buf_windowed = WebRtc_CreateBuffer(kBufSizePartitions, - sizeof(float) * 2 * PART_LEN1); - if (!aec->far_buf_windowed) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } + // Create far-end buffers. + aec->far_buf = + WebRtc_CreateBuffer(kBufSizePartitions, sizeof(float) * 2 * PART_LEN1); + if (!aec->far_buf) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } + aec->far_buf_windowed = + WebRtc_CreateBuffer(kBufSizePartitions, sizeof(float) * 2 * PART_LEN1); + if (!aec->far_buf_windowed) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } #ifdef WEBRTC_AEC_DEBUG_DUMP - aec->far_time_buf = WebRtc_CreateBuffer(kBufSizePartitions, - sizeof(int16_t) * PART_LEN); - if (!aec->far_time_buf) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } - { - char filename[64]; - sprintf(filename, "aec_far%d.pcm", webrtc_aec_instance_count); - aec->farFile = fopen(filename, "wb"); - sprintf(filename, "aec_near%d.pcm", webrtc_aec_instance_count); - aec->nearFile = fopen(filename, "wb"); - sprintf(filename, "aec_out%d.pcm", webrtc_aec_instance_count); - aec->outFile = fopen(filename, "wb"); - sprintf(filename, "aec_out_linear%d.pcm", webrtc_aec_instance_count); - aec->outLinearFile = fopen(filename, "wb"); - } + aec->far_time_buf = + WebRtc_CreateBuffer(kBufSizePartitions, sizeof(int16_t) * PART_LEN); + if (!aec->far_time_buf) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } + { + char filename[64]; + sprintf(filename, "aec_far%d.pcm", webrtc_aec_instance_count); + aec->farFile = fopen(filename, "wb"); + sprintf(filename, "aec_near%d.pcm", webrtc_aec_instance_count); + aec->nearFile = fopen(filename, "wb"); + sprintf(filename, "aec_out%d.pcm", webrtc_aec_instance_count); + aec->outFile = fopen(filename, "wb"); + sprintf(filename, "aec_out_linear%d.pcm", webrtc_aec_instance_count); + aec->outLinearFile = fopen(filename, "wb"); + } #endif - aec->delay_estimator_farend = - WebRtc_CreateDelayEstimatorFarend(PART_LEN1, kHistorySizeBlocks); - if (aec->delay_estimator_farend == NULL) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } - aec->delay_estimator = - WebRtc_CreateDelayEstimator(aec->delay_estimator_farend, - kLookaheadBlocks); - if (aec->delay_estimator == NULL) { - WebRtcAec_FreeAec(aec); - aec = NULL; - return -1; - } + aec->delay_estimator_farend = + WebRtc_CreateDelayEstimatorFarend(PART_LEN1, kHistorySizeBlocks); + if (aec->delay_estimator_farend == NULL) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } + aec->delay_estimator = WebRtc_CreateDelayEstimator( + aec->delay_estimator_farend, kLookaheadBlocks); + if (aec->delay_estimator == NULL) { + WebRtcAec_FreeAec(aec); + aec = NULL; + return -1; + } - 
return 0; + return 0; } -int WebRtcAec_FreeAec(AecCore* aec) -{ - if (aec == NULL) { - return -1; - } +int WebRtcAec_FreeAec(AecCore* aec) { + if (aec == NULL) { + return -1; + } - WebRtc_FreeBuffer(aec->nearFrBuf); - WebRtc_FreeBuffer(aec->outFrBuf); + WebRtc_FreeBuffer(aec->nearFrBuf); + WebRtc_FreeBuffer(aec->outFrBuf); - WebRtc_FreeBuffer(aec->nearFrBufH); - WebRtc_FreeBuffer(aec->outFrBufH); + WebRtc_FreeBuffer(aec->nearFrBufH); + WebRtc_FreeBuffer(aec->outFrBufH); - WebRtc_FreeBuffer(aec->far_buf); - WebRtc_FreeBuffer(aec->far_buf_windowed); + WebRtc_FreeBuffer(aec->far_buf); + WebRtc_FreeBuffer(aec->far_buf_windowed); #ifdef WEBRTC_AEC_DEBUG_DUMP - WebRtc_FreeBuffer(aec->far_time_buf); - fclose(aec->farFile); - fclose(aec->nearFile); - fclose(aec->outFile); - fclose(aec->outLinearFile); + WebRtc_FreeBuffer(aec->far_time_buf); + fclose(aec->farFile); + fclose(aec->nearFile); + fclose(aec->outFile); + fclose(aec->outLinearFile); #endif - WebRtc_FreeDelayEstimator(aec->delay_estimator); - WebRtc_FreeDelayEstimatorFarend(aec->delay_estimator_farend); + WebRtc_FreeDelayEstimator(aec->delay_estimator); + WebRtc_FreeDelayEstimatorFarend(aec->delay_estimator_farend); - free(aec); - return 0; + free(aec); + return 0; } -static void FilterFar(AecCore* aec, float yf[2][PART_LEN1]) -{ +static void FilterFar(AecCore* aec, float yf[2][PART_LEN1]) { int i; for (i = 0; i < aec->num_partitions; i++) { int j; @@ -297,23 +276,27 @@ static void FilterFar(AecCore* aec, float yf[2][PART_LEN1]) int pos = i * PART_LEN1; // Check for wrap if (i + aec->xfBufBlockPos >= aec->num_partitions) { - xPos -= aec->num_partitions*(PART_LEN1); + xPos -= aec->num_partitions * (PART_LEN1); } for (j = 0; j < PART_LEN1; j++) { - yf[0][j] += MulRe(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j], - aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]); - yf[1][j] += MulIm(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j], - aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]); + yf[0][j] += MulRe(aec->xfBuf[0][xPos + j], + aec->xfBuf[1][xPos + j], + aec->wfBuf[0][pos + j], + aec->wfBuf[1][pos + j]); + yf[1][j] += MulIm(aec->xfBuf[0][xPos + j], + aec->xfBuf[1][xPos + j], + aec->wfBuf[0][pos + j], + aec->wfBuf[1][pos + j]); } } } -static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) -{ +static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) { const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu; - const float error_threshold = aec->extended_filter_enabled ? - kExtendedErrorThreshold : aec->normal_error_threshold; + const float error_threshold = aec->extended_filter_enabled + ? kExtendedErrorThreshold + : aec->normal_error_threshold; int i; float abs_ef; for (i = 0; i < (PART_LEN1); i++) { @@ -335,7 +318,7 @@ static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) // Time-unconstrined filter adaptation. // TODO(andrew): consider for a low-complexity mode. 
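// FilterFar above is the core of a partitioned-block frequency-domain filter:
// xfBuf is a circular buffer holding the spectra of the last num_partitions
// far-end blocks (PART_LEN1 = 65 bins each), wfBuf holds one filter block per
// partition, and the echo estimate accumulates yf += X_i .* W_i over all
// partitions. The wrap test keeps the partition index inside the buffer; the
// indexing rule, isolated into a sketch:

static int PartitionOffset(int i, int block_pos, int num_partitions) {
  int x_pos = (i + block_pos) * PART_LEN1;
  if (i + block_pos >= num_partitions) {
    x_pos -= num_partitions * PART_LEN1;  // wrap back to the buffer start
  }
  return x_pos;
}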
-//static void FilterAdaptationUnconstrained(AecCore* aec, float *fft, +// static void FilterAdaptationUnconstrained(AecCore* aec, float *fft, // float ef[2][PART_LEN1]) { // int i, j; // for (i = 0; i < aec->num_partitions; i++) { @@ -359,10 +342,10 @@ static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) // } //} -static void FilterAdaptation(AecCore* aec, float *fft, float ef[2][PART_LEN1]) { +static void FilterAdaptation(AecCore* aec, float* fft, float ef[2][PART_LEN1]) { int i, j; for (i = 0; i < aec->num_partitions; i++) { - int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1); + int xPos = (i + aec->xfBufBlockPos) * (PART_LEN1); int pos; // Check for wrap if (i + aec->xfBufBlockPos >= aec->num_partitions) { @@ -375,14 +358,17 @@ static void FilterAdaptation(AecCore* aec, float *fft, float ef[2][PART_LEN1]) { fft[2 * j] = MulRe(aec->xfBuf[0][xPos + j], -aec->xfBuf[1][xPos + j], - ef[0][j], ef[1][j]); + ef[0][j], + ef[1][j]); fft[2 * j + 1] = MulIm(aec->xfBuf[0][xPos + j], -aec->xfBuf[1][xPos + j], - ef[0][j], ef[1][j]); + ef[0][j], + ef[1][j]); } fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN], -aec->xfBuf[1][xPos + PART_LEN], - ef[0][PART_LEN], ef[1][PART_LEN]); + ef[0][PART_LEN], + ef[1][PART_LEN]); aec_rdft_inverse_128(fft); memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN); @@ -406,7 +392,8 @@ static void FilterAdaptation(AecCore* aec, float *fft, float ef[2][PART_LEN1]) { } } -static void OverdriveAndSuppress(AecCore* aec, float hNl[PART_LEN1], +static void OverdriveAndSuppress(AecCore* aec, + float hNl[PART_LEN1], const float hNlFb, float efw[2][PART_LEN1]) { int i; @@ -414,7 +401,7 @@ static void OverdriveAndSuppress(AecCore* aec, float hNl[PART_LEN1], // Weight subbands if (hNl[i] > hNlFb) { hNl[i] = WebRtcAec_weightCurve[i] * hNlFb + - (1 - WebRtcAec_weightCurve[i]) * hNl[i]; + (1 - WebRtcAec_weightCurve[i]) * hNl[i]; } hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]); @@ -433,158 +420,164 @@ WebRtcAec_ScaleErrorSignal_t WebRtcAec_ScaleErrorSignal; WebRtcAec_FilterAdaptation_t WebRtcAec_FilterAdaptation; WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress; -int WebRtcAec_InitAec(AecCore* aec, int sampFreq) -{ - int i; +int WebRtcAec_InitAec(AecCore* aec, int sampFreq) { + int i; - aec->sampFreq = sampFreq; + aec->sampFreq = sampFreq; - if (sampFreq == 8000) { - aec->normal_mu = 0.6f; - aec->normal_error_threshold = 2e-6f; - } - else { - aec->normal_mu = 0.5f; - aec->normal_error_threshold = 1.5e-6f; - } + if (sampFreq == 8000) { + aec->normal_mu = 0.6f; + aec->normal_error_threshold = 2e-6f; + } else { + aec->normal_mu = 0.5f; + aec->normal_error_threshold = 1.5e-6f; + } - if (WebRtc_InitBuffer(aec->nearFrBuf) == -1) { - return -1; - } + if (WebRtc_InitBuffer(aec->nearFrBuf) == -1) { + return -1; + } - if (WebRtc_InitBuffer(aec->outFrBuf) == -1) { - return -1; - } + if (WebRtc_InitBuffer(aec->outFrBuf) == -1) { + return -1; + } - if (WebRtc_InitBuffer(aec->nearFrBufH) == -1) { - return -1; - } + if (WebRtc_InitBuffer(aec->nearFrBufH) == -1) { + return -1; + } - if (WebRtc_InitBuffer(aec->outFrBufH) == -1) { - return -1; - } + if (WebRtc_InitBuffer(aec->outFrBufH) == -1) { + return -1; + } - // Initialize far-end buffers. - if (WebRtc_InitBuffer(aec->far_buf) == -1) { - return -1; - } - if (WebRtc_InitBuffer(aec->far_buf_windowed) == -1) { - return -1; - } + // Initialize far-end buffers. 
+ if (WebRtc_InitBuffer(aec->far_buf) == -1) { + return -1; + } + if (WebRtc_InitBuffer(aec->far_buf_windowed) == -1) { + return -1; + } #ifdef WEBRTC_AEC_DEBUG_DUMP - if (WebRtc_InitBuffer(aec->far_time_buf) == -1) { - return -1; - } + if (WebRtc_InitBuffer(aec->far_time_buf) == -1) { + return -1; + } #endif - aec->system_delay = 0; + aec->system_delay = 0; - if (WebRtc_InitDelayEstimatorFarend(aec->delay_estimator_farend) != 0) { - return -1; - } - if (WebRtc_InitDelayEstimator(aec->delay_estimator) != 0) { - return -1; - } - aec->delay_logging_enabled = 0; - memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram)); + if (WebRtc_InitDelayEstimatorFarend(aec->delay_estimator_farend) != 0) { + return -1; + } + if (WebRtc_InitDelayEstimator(aec->delay_estimator) != 0) { + return -1; + } + aec->delay_logging_enabled = 0; + memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram)); - aec->extended_filter_enabled = 0; - aec->num_partitions = kNormalNumPartitions; + aec->extended_filter_enabled = 0; + aec->num_partitions = kNormalNumPartitions; - // Default target suppression mode. - aec->nlp_mode = 1; + // Update the delay estimator with filter length. We use half the + // |num_partitions| to take the echo path into account. In practice we say + // that the echo has a duration of maximum half |num_partitions|, which is not + // true, but serves as a crude measure. + WebRtc_set_allowed_offset(aec->delay_estimator, aec->num_partitions / 2); + // TODO(bjornv): I currently hard coded the enable. Once we've established + // that AECM has no performance regression, robust_validation will be enabled + // all the time and the APIs to turn it on/off will be removed. Hence, remove + // this line then. + WebRtc_enable_robust_validation(aec->delay_estimator, 1); - // Sampling frequency multiplier - // SWB is processed as 160 frame size - if (aec->sampFreq == 32000) { - aec->mult = (short)aec->sampFreq / 16000; - } - else { - aec->mult = (short)aec->sampFreq / 8000; - } + // Default target suppression mode. + aec->nlp_mode = 1; - aec->farBufWritePos = 0; - aec->farBufReadPos = 0; + // Sampling frequency multiplier + // SWB is processed as 160 frame size + if (aec->sampFreq == 32000) { + aec->mult = (short)aec->sampFreq / 16000; + } else { + aec->mult = (short)aec->sampFreq / 8000; + } - aec->inSamples = 0; - aec->outSamples = 0; - aec->knownDelay = 0; + aec->farBufWritePos = 0; + aec->farBufReadPos = 0; - // Initialize buffers - memset(aec->dBuf, 0, sizeof(aec->dBuf)); - memset(aec->eBuf, 0, sizeof(aec->eBuf)); - // For H band - memset(aec->dBufH, 0, sizeof(aec->dBufH)); + aec->inSamples = 0; + aec->outSamples = 0; + aec->knownDelay = 0; - memset(aec->xPow, 0, sizeof(aec->xPow)); - memset(aec->dPow, 0, sizeof(aec->dPow)); - memset(aec->dInitMinPow, 0, sizeof(aec->dInitMinPow)); - aec->noisePow = aec->dInitMinPow; - aec->noiseEstCtr = 0; + // Initialize buffers + memset(aec->dBuf, 0, sizeof(aec->dBuf)); + memset(aec->eBuf, 0, sizeof(aec->eBuf)); + // For H band + memset(aec->dBufH, 0, sizeof(aec->dBufH)); - // Initial comfort noise power - for (i = 0; i < PART_LEN1; i++) { - aec->dMinPow[i] = 1.0e6f; - } + memset(aec->xPow, 0, sizeof(aec->xPow)); + memset(aec->dPow, 0, sizeof(aec->dPow)); + memset(aec->dInitMinPow, 0, sizeof(aec->dInitMinPow)); + aec->noisePow = aec->dInitMinPow; + aec->noiseEstCtr = 0; - // Holds the last block written to - aec->xfBufBlockPos = 0; - // TODO: Investigate need for these initializations. 
Deleting them doesn't - // change the output at all and yields 0.4% overall speedup. - memset(aec->xfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * - PART_LEN1); - memset(aec->wfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * - PART_LEN1); - memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1); - memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1); - memset(aec->xfwBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * - PART_LEN1); - memset(aec->se, 0, sizeof(float) * PART_LEN1); + // Initial comfort noise power + for (i = 0; i < PART_LEN1; i++) { + aec->dMinPow[i] = 1.0e6f; + } - // To prevent numerical instability in the first block. - for (i = 0; i < PART_LEN1; i++) { - aec->sd[i] = 1; - } - for (i = 0; i < PART_LEN1; i++) { - aec->sx[i] = 1; - } + // Holds the last block written to + aec->xfBufBlockPos = 0; + // TODO: Investigate need for these initializations. Deleting them doesn't + // change the output at all and yields 0.4% overall speedup. + memset(aec->xfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1); + memset(aec->wfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1); + memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1); + memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1); + memset( + aec->xfwBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1); + memset(aec->se, 0, sizeof(float) * PART_LEN1); - memset(aec->hNs, 0, sizeof(aec->hNs)); - memset(aec->outBuf, 0, sizeof(float) * PART_LEN); + // To prevent numerical instability in the first block. + for (i = 0; i < PART_LEN1; i++) { + aec->sd[i] = 1; + } + for (i = 0; i < PART_LEN1; i++) { + aec->sx[i] = 1; + } - aec->hNlFbMin = 1; - aec->hNlFbLocalMin = 1; - aec->hNlXdAvgMin = 1; - aec->hNlNewMin = 0; - aec->hNlMinCtr = 0; - aec->overDrive = 2; - aec->overDriveSm = 2; - aec->delayIdx = 0; - aec->stNearState = 0; - aec->echoState = 0; - aec->divergeState = 0; + memset(aec->hNs, 0, sizeof(aec->hNs)); + memset(aec->outBuf, 0, sizeof(float) * PART_LEN); - aec->seed = 777; - aec->delayEstCtr = 0; + aec->hNlFbMin = 1; + aec->hNlFbLocalMin = 1; + aec->hNlXdAvgMin = 1; + aec->hNlNewMin = 0; + aec->hNlMinCtr = 0; + aec->overDrive = 2; + aec->overDriveSm = 2; + aec->delayIdx = 0; + aec->stNearState = 0; + aec->echoState = 0; + aec->divergeState = 0; - // Metrics disabled by default - aec->metricsMode = 0; - InitMetrics(aec); + aec->seed = 777; + aec->delayEstCtr = 0; - // Assembly optimization - WebRtcAec_FilterFar = FilterFar; - WebRtcAec_ScaleErrorSignal = ScaleErrorSignal; - WebRtcAec_FilterAdaptation = FilterAdaptation; - WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress; + // Metrics disabled by default + aec->metricsMode = 0; + InitMetrics(aec); + + // Assembly optimization + WebRtcAec_FilterFar = FilterFar; + WebRtcAec_ScaleErrorSignal = ScaleErrorSignal; + WebRtcAec_FilterAdaptation = FilterAdaptation; + WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress; #if defined(WEBRTC_ARCH_X86_FAMILY) - if (WebRtc_GetCPUInfo(kSSE2)) { - WebRtcAec_InitAec_SSE2(); - } + if (WebRtc_GetCPUInfo(kSSE2)) { + WebRtcAec_InitAec_SSE2(); + } #endif - aec_rdft_init(); + aec_rdft_init(); - return 0; + return 0; } void WebRtcAec_BufferFarendPartition(AecCore* aec, const float* farend) { @@ -622,80 +615,80 @@ void WebRtcAec_ProcessFrame(AecCore* aec, int knownDelay, int16_t* out, int16_t* outH) { - int out_elements = 0; + int out_elements = 0; - // For each frame the process is as follows: - // 1) If the system_delay indicates on being too small for processing a - // frame we stuff the buffer with enough 
data for 10 ms. - // 2) Adjust the buffer to the system delay, by moving the read pointer. - // 3) TODO(bjornv): Investigate if we need to add this: - // If we can't move read pointer due to buffer size limitations we - // flush/stuff the buffer. - // 4) Process as many partitions as possible. - // 5) Update the |system_delay| with respect to a full frame of FRAME_LEN - // samples. Even though we will have data left to process (we work with - // partitions) we consider updating a whole frame, since that's the - // amount of data we input and output in audio_processing. - // 6) Update the outputs. + // For each frame the process is as follows: + // 1) If the system_delay indicates on being too small for processing a + // frame we stuff the buffer with enough data for 10 ms. + // 2) Adjust the buffer to the system delay, by moving the read pointer. + // 3) TODO(bjornv): Investigate if we need to add this: + // If we can't move read pointer due to buffer size limitations we + // flush/stuff the buffer. + // 4) Process as many partitions as possible. + // 5) Update the |system_delay| with respect to a full frame of FRAME_LEN + // samples. Even though we will have data left to process (we work with + // partitions) we consider updating a whole frame, since that's the + // amount of data we input and output in audio_processing. + // 6) Update the outputs. - // TODO(bjornv): Investigate how we should round the delay difference; right - // now we know that incoming |knownDelay| is underestimated when it's less - // than |aec->knownDelay|. We therefore, round (-32) in that direction. In - // the other direction, we don't have this situation, but might flush one - // partition too little. This can cause non-causality, which should be - // investigated. Maybe, allow for a non-symmetric rounding, like -16. - int move_elements = (aec->knownDelay - knownDelay - 32) / PART_LEN; - int moved_elements = 0; + // TODO(bjornv): Investigate how we should round the delay difference; right + // now we know that incoming |knownDelay| is underestimated when it's less + // than |aec->knownDelay|. We therefore, round (-32) in that direction. In + // the other direction, we don't have this situation, but might flush one + // partition too little. This can cause non-causality, which should be + // investigated. Maybe, allow for a non-symmetric rounding, like -16. + int move_elements = (aec->knownDelay - knownDelay - 32) / PART_LEN; + int moved_elements = 0; - // TODO(bjornv): Change the near-end buffer handling to be the same as for - // far-end, that is, with a near_pre_buf. - // Buffer the near-end frame. - WebRtc_WriteBuffer(aec->nearFrBuf, nearend, FRAME_LEN); - // For H band - if (aec->sampFreq == 32000) { - WebRtc_WriteBuffer(aec->nearFrBufH, nearendH, FRAME_LEN); - } + // TODO(bjornv): Change the near-end buffer handling to be the same as for + // far-end, that is, with a near_pre_buf. + // Buffer the near-end frame. + WebRtc_WriteBuffer(aec->nearFrBuf, nearend, FRAME_LEN); + // For H band + if (aec->sampFreq == 32000) { + WebRtc_WriteBuffer(aec->nearFrBufH, nearendH, FRAME_LEN); + } - // 1) At most we process |aec->mult|+1 partitions in 10 ms. Make sure we - // have enough far-end data for that by stuffing the buffer if the - // |system_delay| indicates others. - if (aec->system_delay < FRAME_LEN) { - // We don't have enough data so we rewind 10 ms. - WebRtcAec_MoveFarReadPtr(aec, -(aec->mult + 1)); - } + // 1) At most we process |aec->mult|+1 partitions in 10 ms. 
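// A worked example of the delay compensation above, with PART_LEN = 64: if the
// previously applied delay aec->knownDelay is 160 samples and the newly
// reported knownDelay is 0, then
//   move_elements = (160 - 0 - 32) / 64 = 2
// partitions are flushed from the far-end buffers, after which knownDelay is
// reduced by moved_elements * PART_LEN = 128 samples. The -32 biases the
// truncating division toward flushing when the reported delay shrinks, which
// is the asymmetry the TODO above discusses.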
Make sure we + // have enough far-end data for that by stuffing the buffer if the + // |system_delay| indicates others. + if (aec->system_delay < FRAME_LEN) { + // We don't have enough data so we rewind 10 ms. + WebRtcAec_MoveFarReadPtr(aec, -(aec->mult + 1)); + } - // 2) Compensate for a possible change in the system delay. - WebRtc_MoveReadPtr(aec->far_buf_windowed, move_elements); - moved_elements = WebRtc_MoveReadPtr(aec->far_buf, move_elements); - aec->knownDelay -= moved_elements * PART_LEN; + // 2) Compensate for a possible change in the system delay. + WebRtc_MoveReadPtr(aec->far_buf_windowed, move_elements); + moved_elements = WebRtc_MoveReadPtr(aec->far_buf, move_elements); + aec->knownDelay -= moved_elements * PART_LEN; #ifdef WEBRTC_AEC_DEBUG_DUMP - WebRtc_MoveReadPtr(aec->far_time_buf, move_elements); + WebRtc_MoveReadPtr(aec->far_time_buf, move_elements); #endif - // 4) Process as many blocks as possible. - while (WebRtc_available_read(aec->nearFrBuf) >= PART_LEN) { - ProcessBlock(aec); - } + // 4) Process as many blocks as possible. + while (WebRtc_available_read(aec->nearFrBuf) >= PART_LEN) { + ProcessBlock(aec); + } - // 5) Update system delay with respect to the entire frame. - aec->system_delay -= FRAME_LEN; + // 5) Update system delay with respect to the entire frame. + aec->system_delay -= FRAME_LEN; - // 6) Update output frame. - // Stuff the out buffer if we have less than a frame to output. - // This should only happen for the first frame. - out_elements = (int) WebRtc_available_read(aec->outFrBuf); - if (out_elements < FRAME_LEN) { - WebRtc_MoveReadPtr(aec->outFrBuf, out_elements - FRAME_LEN); - if (aec->sampFreq == 32000) { - WebRtc_MoveReadPtr(aec->outFrBufH, out_elements - FRAME_LEN); - } - } - // Obtain an output frame. - WebRtc_ReadBuffer(aec->outFrBuf, NULL, out, FRAME_LEN); - // For H band. + // 6) Update output frame. + // Stuff the out buffer if we have less than a frame to output. + // This should only happen for the first frame. + out_elements = (int)WebRtc_available_read(aec->outFrBuf); + if (out_elements < FRAME_LEN) { + WebRtc_MoveReadPtr(aec->outFrBuf, out_elements - FRAME_LEN); if (aec->sampFreq == 32000) { - WebRtc_ReadBuffer(aec->outFrBufH, NULL, outH, FRAME_LEN); + WebRtc_MoveReadPtr(aec->outFrBufH, out_elements - FRAME_LEN); } + } + // Obtain an output frame. + WebRtc_ReadBuffer(aec->outFrBuf, NULL, out, FRAME_LEN); + // For H band. + if (aec->sampFreq == 32000) { + WebRtc_ReadBuffer(aec->outFrBufH, NULL, outH, FRAME_LEN); + } } int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) { @@ -742,9 +735,9 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) { // Calculate the L1 norm, with median value as central moment. for (i = 0; i < kHistorySizeBlocks; i++) { - l1_norm += (float) (fabs(i - my_median) * self->delay_histogram[i]); + l1_norm += (float)(fabs(i - my_median) * self->delay_histogram[i]); } - *std = (int) (l1_norm / (float) num_delay_values + 0.5f) * kMsPerBlock; + *std = (int)(l1_norm / (float)num_delay_values + 0.5f) * kMsPerBlock; // Reset histogram. 
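// A worked example of the delay-spread metric above: the reported "std" is an
// L1 deviation around the median, expressed in ms. With histogram counts of 3
// at block 4 and 1 at block 6, the median is block 4 and
//   l1_norm = |4-4|*3 + |6-4|*1 = 2,
//   *std = (int)(2/4 + 0.5f) * kMsPerBlock = 1 * kMsPerBlock.
// Note that the rounding cast binds before the kMsPerBlock scaling, so the
// result is always a whole number of blocks converted to ms.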
memset(self->delay_histogram, 0, sizeof(self->delay_histogram)); @@ -752,11 +745,11 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) { return 0; } -int WebRtcAec_echo_state(AecCore* self) { - return self->echoState; -} +int WebRtcAec_echo_state(AecCore* self) { return self->echoState; } -void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle, +void WebRtcAec_GetEchoStats(AecCore* self, + Stats* erl, + Stats* erle, Stats* a_nlp) { assert(erl != NULL); assert(erle != NULL); @@ -767,12 +760,12 @@ void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle, } #ifdef WEBRTC_AEC_DEBUG_DUMP -void* WebRtcAec_far_time_buf(AecCore* self) { - return self->far_time_buf; -} +void* WebRtcAec_far_time_buf(AecCore* self) { return self->far_time_buf; } #endif -void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode, +void WebRtcAec_SetConfigCore(AecCore* self, + int nlp_mode, + int metrics_mode, int delay_logging) { assert(nlp_mode >= 0 && nlp_mode < 3); self->nlp_mode = nlp_mode; @@ -789,15 +782,15 @@ void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode, void WebRtcAec_enable_delay_correction(AecCore* self, int enable) { self->extended_filter_enabled = enable; self->num_partitions = enable ? kExtendedNumPartitions : kNormalNumPartitions; + // Update the delay estimator with filter length. See InitAEC() for details. + WebRtc_set_allowed_offset(self->delay_estimator, self->num_partitions / 2); } int WebRtcAec_delay_correction_enabled(AecCore* self) { return self->extended_filter_enabled; } -int WebRtcAec_system_delay(AecCore* self) { - return self->system_delay; -} +int WebRtcAec_system_delay(AecCore* self) { return self->system_delay; } void WebRtcAec_SetSystemDelay(AecCore* self, int delay) { assert(delay >= 0); @@ -805,665 +798,668 @@ void WebRtcAec_SetSystemDelay(AecCore* self, int delay) { } static void ProcessBlock(AecCore* aec) { - int i; - float d[PART_LEN], y[PART_LEN], e[PART_LEN], dH[PART_LEN]; - float scale; + int i; + float d[PART_LEN], y[PART_LEN], e[PART_LEN], dH[PART_LEN]; + float scale; - float fft[PART_LEN2]; - float xf[2][PART_LEN1], yf[2][PART_LEN1], ef[2][PART_LEN1]; - float df[2][PART_LEN1]; - float far_spectrum = 0.0f; - float near_spectrum = 0.0f; - float abs_far_spectrum[PART_LEN1]; - float abs_near_spectrum[PART_LEN1]; + float fft[PART_LEN2]; + float xf[2][PART_LEN1], yf[2][PART_LEN1], ef[2][PART_LEN1]; + float df[2][PART_LEN1]; + float far_spectrum = 0.0f; + float near_spectrum = 0.0f; + float abs_far_spectrum[PART_LEN1]; + float abs_near_spectrum[PART_LEN1]; - const float gPow[2] = {0.9f, 0.1f}; + const float gPow[2] = {0.9f, 0.1f}; - // Noise estimate constants. - const int noiseInitBlocks = 500 * aec->mult; - const float step = 0.1f; - const float ramp = 1.0002f; - const float gInitNoise[2] = {0.999f, 0.001f}; + // Noise estimate constants. + const int noiseInitBlocks = 500 * aec->mult; + const float step = 0.1f; + const float ramp = 1.0002f; + const float gInitNoise[2] = {0.999f, 0.001f}; - int16_t nearend[PART_LEN]; - int16_t* nearend_ptr = NULL; - int16_t output[PART_LEN]; - int16_t outputH[PART_LEN]; + int16_t nearend[PART_LEN]; + int16_t* nearend_ptr = NULL; + int16_t output[PART_LEN]; + int16_t outputH[PART_LEN]; - float* xf_ptr = NULL; + float* xf_ptr = NULL; - memset(dH, 0, sizeof(dH)); - if (aec->sampFreq == 32000) { - // Get the upper band first so we can reuse |nearend|. 
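// One of the few behavioral changes buried in this otherwise mostly
// reformatting patch: both InitAec() and enable_delay_correction() above now
// call WebRtc_set_allowed_offset() with half of num_partitions, so the delay
// estimator's search range follows the active filter length when
// extended-filter mode is toggled (and robust validation is force-enabled for
// now, per the TODO). The invariant the two call sites share, written as a
// hypothetical helper:

static void UpdateAllowedOffset(AecCore* self) {
  // num_partitions is kExtendedNumPartitions or kNormalNumPartitions.
  WebRtc_set_allowed_offset(self->delay_estimator, self->num_partitions / 2);
}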
- WebRtc_ReadBuffer(aec->nearFrBufH, - (void**) &nearend_ptr, - nearend, - PART_LEN); - for (i = 0; i < PART_LEN; i++) { - dH[i] = (float) (nearend_ptr[i]); - } - memcpy(aec->dBufH + PART_LEN, dH, sizeof(float) * PART_LEN); - } - WebRtc_ReadBuffer(aec->nearFrBuf, (void**) &nearend_ptr, nearend, PART_LEN); - - // ---------- Ooura fft ---------- - // Concatenate old and new nearend blocks. + memset(dH, 0, sizeof(dH)); + if (aec->sampFreq == 32000) { + // Get the upper band first so we can reuse |nearend|. + WebRtc_ReadBuffer(aec->nearFrBufH, (void**)&nearend_ptr, nearend, PART_LEN); for (i = 0; i < PART_LEN; i++) { - d[i] = (float) (nearend_ptr[i]); + dH[i] = (float)(nearend_ptr[i]); } - memcpy(aec->dBuf + PART_LEN, d, sizeof(float) * PART_LEN); + memcpy(aec->dBufH + PART_LEN, dH, sizeof(float) * PART_LEN); + } + WebRtc_ReadBuffer(aec->nearFrBuf, (void**)&nearend_ptr, nearend, PART_LEN); + + // ---------- Ooura fft ---------- + // Concatenate old and new nearend blocks. + for (i = 0; i < PART_LEN; i++) { + d[i] = (float)(nearend_ptr[i]); + } + memcpy(aec->dBuf + PART_LEN, d, sizeof(float) * PART_LEN); #ifdef WEBRTC_AEC_DEBUG_DUMP - { - int16_t farend[PART_LEN]; - int16_t* farend_ptr = NULL; - WebRtc_ReadBuffer(aec->far_time_buf, (void**) &farend_ptr, farend, 1); - (void)fwrite(farend_ptr, sizeof(int16_t), PART_LEN, aec->farFile); - (void)fwrite(nearend_ptr, sizeof(int16_t), PART_LEN, aec->nearFile); - } + { + int16_t farend[PART_LEN]; + int16_t* farend_ptr = NULL; + WebRtc_ReadBuffer(aec->far_time_buf, (void**)&farend_ptr, farend, 1); + (void)fwrite(farend_ptr, sizeof(int16_t), PART_LEN, aec->farFile); + (void)fwrite(nearend_ptr, sizeof(int16_t), PART_LEN, aec->nearFile); + } #endif - // We should always have at least one element stored in |far_buf|. - assert(WebRtc_available_read(aec->far_buf) > 0); - WebRtc_ReadBuffer(aec->far_buf, (void**) &xf_ptr, &xf[0][0], 1); + // We should always have at least one element stored in |far_buf|. + assert(WebRtc_available_read(aec->far_buf) > 0); + WebRtc_ReadBuffer(aec->far_buf, (void**)&xf_ptr, &xf[0][0], 1); - // Near fft - memcpy(fft, aec->dBuf, sizeof(float) * PART_LEN2); - TimeToFrequency(fft, df, 0); + // Near fft + memcpy(fft, aec->dBuf, sizeof(float) * PART_LEN2); + TimeToFrequency(fft, df, 0); - // Power smoothing + // Power smoothing + for (i = 0; i < PART_LEN1; i++) { + far_spectrum = (xf_ptr[i] * xf_ptr[i]) + + (xf_ptr[PART_LEN1 + i] * xf_ptr[PART_LEN1 + i]); + aec->xPow[i] = + gPow[0] * aec->xPow[i] + gPow[1] * aec->num_partitions * far_spectrum; + // Calculate absolute spectra + abs_far_spectrum[i] = sqrtf(far_spectrum); + + near_spectrum = df[0][i] * df[0][i] + df[1][i] * df[1][i]; + aec->dPow[i] = gPow[0] * aec->dPow[i] + gPow[1] * near_spectrum; + // Calculate absolute spectra + abs_near_spectrum[i] = sqrtf(near_spectrum); + } + + // Estimate noise power. Wait until dPow is more stable. + if (aec->noiseEstCtr > 50) { for (i = 0; i < PART_LEN1; i++) { - far_spectrum = (xf_ptr[i] * xf_ptr[i]) + - (xf_ptr[PART_LEN1 + i] * xf_ptr[PART_LEN1 + i]); - aec->xPow[i] = gPow[0] * aec->xPow[i] + gPow[1] * aec->num_partitions * - far_spectrum; - // Calculate absolute spectra - abs_far_spectrum[i] = sqrtf(far_spectrum); - - near_spectrum = df[0][i] * df[0][i] + df[1][i] * df[1][i]; - aec->dPow[i] = gPow[0] * aec->dPow[i] + gPow[1] * near_spectrum; - // Calculate absolute spectra - abs_near_spectrum[i] = sqrtf(near_spectrum); - } - - // Estimate noise power. Wait until dPow is more stable. 
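// The power smoothing above is a one-pole IIR per frequency bin, with
// gPow = {0.9, 0.1}:
//   xPow[i] <- 0.9 * xPow[i] + 0.1 * num_partitions * |X[i]|^2
//   dPow[i] <- 0.9 * dPow[i] + 0.1 * |D[i]|^2
// A pole at 0.9 gives an effective averaging window of roughly
// 1 / (1 - 0.9) = 10 blocks. |X[i]|^2 is assembled from the split-complex
// far-end spectrum (re at xf_ptr[i], im at xf_ptr[PART_LEN1 + i]); the extra
// num_partitions factor scales one block's power up to an estimate over the
// whole filter history, which is how ScaleErrorSignal consumes it.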
- if (aec->noiseEstCtr > 50) { - for (i = 0; i < PART_LEN1; i++) { - if (aec->dPow[i] < aec->dMinPow[i]) { - aec->dMinPow[i] = (aec->dPow[i] + step * (aec->dMinPow[i] - - aec->dPow[i])) * ramp; - } - else { - aec->dMinPow[i] *= ramp; - } - } - } - - // Smooth increasing noise power from zero at the start, - // to avoid a sudden burst of comfort noise. - if (aec->noiseEstCtr < noiseInitBlocks) { - aec->noiseEstCtr++; - for (i = 0; i < PART_LEN1; i++) { - if (aec->dMinPow[i] > aec->dInitMinPow[i]) { - aec->dInitMinPow[i] = gInitNoise[0] * aec->dInitMinPow[i] + - gInitNoise[1] * aec->dMinPow[i]; - } - else { - aec->dInitMinPow[i] = aec->dMinPow[i]; - } - } - aec->noisePow = aec->dInitMinPow; - } - else { - aec->noisePow = aec->dMinPow; - } - - // Block wise delay estimation used for logging - if (aec->delay_logging_enabled) { - int delay_estimate = 0; - if (WebRtc_AddFarSpectrumFloat(aec->delay_estimator_farend, - abs_far_spectrum, PART_LEN1) == 0) { - delay_estimate = WebRtc_DelayEstimatorProcessFloat(aec->delay_estimator, - abs_near_spectrum, - PART_LEN1); - if (delay_estimate >= 0) { - // Update delay estimate buffer. - aec->delay_histogram[delay_estimate]++; - } + if (aec->dPow[i] < aec->dMinPow[i]) { + aec->dMinPow[i] = + (aec->dPow[i] + step * (aec->dMinPow[i] - aec->dPow[i])) * ramp; + } else { + aec->dMinPow[i] *= ramp; } } + } - // Update the xfBuf block position. - aec->xfBufBlockPos--; - if (aec->xfBufBlockPos == -1) { - aec->xfBufBlockPos = aec->num_partitions - 1; + // Smooth increasing noise power from zero at the start, + // to avoid a sudden burst of comfort noise. + if (aec->noiseEstCtr < noiseInitBlocks) { + aec->noiseEstCtr++; + for (i = 0; i < PART_LEN1; i++) { + if (aec->dMinPow[i] > aec->dInitMinPow[i]) { + aec->dInitMinPow[i] = gInitNoise[0] * aec->dInitMinPow[i] + + gInitNoise[1] * aec->dMinPow[i]; + } else { + aec->dInitMinPow[i] = aec->dMinPow[i]; + } } + aec->noisePow = aec->dInitMinPow; + } else { + aec->noisePow = aec->dMinPow; + } - // Buffer xf - memcpy(aec->xfBuf[0] + aec->xfBufBlockPos * PART_LEN1, xf_ptr, - sizeof(float) * PART_LEN1); - memcpy(aec->xfBuf[1] + aec->xfBufBlockPos * PART_LEN1, &xf_ptr[PART_LEN1], - sizeof(float) * PART_LEN1); - - memset(yf, 0, sizeof(yf)); - - // Filter far - WebRtcAec_FilterFar(aec, yf); - - // Inverse fft to obtain echo estimate and error. - fft[0] = yf[0][0]; - fft[1] = yf[0][PART_LEN]; - for (i = 1; i < PART_LEN; i++) { - fft[2 * i] = yf[0][i]; - fft[2 * i + 1] = yf[1][i]; + // Block wise delay estimation used for logging + if (aec->delay_logging_enabled) { + int delay_estimate = 0; + if (WebRtc_AddFarSpectrumFloat( + aec->delay_estimator_farend, abs_far_spectrum, PART_LEN1) == 0) { + delay_estimate = WebRtc_DelayEstimatorProcessFloat( + aec->delay_estimator, abs_near_spectrum, PART_LEN1); + if (delay_estimate >= 0) { + // Update delay estimate buffer. + aec->delay_histogram[delay_estimate]++; + } } - aec_rdft_inverse_128(fft); + } - scale = 2.0f / PART_LEN2; - for (i = 0; i < PART_LEN; i++) { - y[i] = fft[PART_LEN + i] * scale; // fft scaling - } + // Update the xfBuf block position. 
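// dMinPow above is a minimum-statistics noise floor per bin: when dPow dips
// below the floor, the floor follows most of the way down (step = 0.1 keeps
// 10% of the gap), and every block it drifts upward by ramp = 1.0002 so it can
// recover once a quiet stretch ends. The update rule, isolated:

static float UpdateNoiseFloor(float d_pow, float d_min_pow) {
  const float step = 0.1f;
  const float ramp = 1.0002f;
  if (d_pow < d_min_pow) {
    return (d_pow + step * (d_min_pow - d_pow)) * ramp;  // follow the dip
  }
  return d_min_pow * ramp;  // otherwise drift slowly upward
}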
+ aec->xfBufBlockPos--; + if (aec->xfBufBlockPos == -1) { + aec->xfBufBlockPos = aec->num_partitions - 1; + } - for (i = 0; i < PART_LEN; i++) { - e[i] = d[i] - y[i]; - } + // Buffer xf + memcpy(aec->xfBuf[0] + aec->xfBufBlockPos * PART_LEN1, + xf_ptr, + sizeof(float) * PART_LEN1); + memcpy(aec->xfBuf[1] + aec->xfBufBlockPos * PART_LEN1, + &xf_ptr[PART_LEN1], + sizeof(float) * PART_LEN1); - // Error fft - memcpy(aec->eBuf + PART_LEN, e, sizeof(float) * PART_LEN); - memset(fft, 0, sizeof(float) * PART_LEN); - memcpy(fft + PART_LEN, e, sizeof(float) * PART_LEN); - // TODO(bjornv): Change to use TimeToFrequency(). - aec_rdft_forward_128(fft); + memset(yf, 0, sizeof(yf)); - ef[1][0] = 0; - ef[1][PART_LEN] = 0; - ef[0][0] = fft[0]; - ef[0][PART_LEN] = fft[1]; - for (i = 1; i < PART_LEN; i++) { - ef[0][i] = fft[2 * i]; - ef[1][i] = fft[2 * i + 1]; - } + // Filter far + WebRtcAec_FilterFar(aec, yf); - if (aec->metricsMode == 1) { - // Note that the first PART_LEN samples in fft (before transformation) are - // zero. Hence, the scaling by two in UpdateLevel() should not be - // performed. That scaling is taken care of in UpdateMetrics() instead. - UpdateLevel(&aec->linoutlevel, ef); - } + // Inverse fft to obtain echo estimate and error. + fft[0] = yf[0][0]; + fft[1] = yf[0][PART_LEN]; + for (i = 1; i < PART_LEN; i++) { + fft[2 * i] = yf[0][i]; + fft[2 * i + 1] = yf[1][i]; + } + aec_rdft_inverse_128(fft); - // Scale error signal inversely with far power. - WebRtcAec_ScaleErrorSignal(aec, ef); - WebRtcAec_FilterAdaptation(aec, fft, ef); - NonLinearProcessing(aec, output, outputH); + scale = 2.0f / PART_LEN2; + for (i = 0; i < PART_LEN; i++) { + y[i] = fft[PART_LEN + i] * scale; // fft scaling + } - if (aec->metricsMode == 1) { - // Update power levels and echo metrics - UpdateLevel(&aec->farlevel, (float (*)[PART_LEN1]) xf_ptr); - UpdateLevel(&aec->nearlevel, df); - UpdateMetrics(aec); - } + for (i = 0; i < PART_LEN; i++) { + e[i] = d[i] - y[i]; + } - // Store the output block. - WebRtc_WriteBuffer(aec->outFrBuf, output, PART_LEN); - // For H band - if (aec->sampFreq == 32000) { - WebRtc_WriteBuffer(aec->outFrBufH, outputH, PART_LEN); - } + // Error fft + memcpy(aec->eBuf + PART_LEN, e, sizeof(float) * PART_LEN); + memset(fft, 0, sizeof(float) * PART_LEN); + memcpy(fft + PART_LEN, e, sizeof(float) * PART_LEN); + // TODO(bjornv): Change to use TimeToFrequency(). + aec_rdft_forward_128(fft); + + ef[1][0] = 0; + ef[1][PART_LEN] = 0; + ef[0][0] = fft[0]; + ef[0][PART_LEN] = fft[1]; + for (i = 1; i < PART_LEN; i++) { + ef[0][i] = fft[2 * i]; + ef[1][i] = fft[2 * i + 1]; + } + + if (aec->metricsMode == 1) { + // Note that the first PART_LEN samples in fft (before transformation) are + // zero. Hence, the scaling by two in UpdateLevel() should not be + // performed. That scaling is taken care of in UpdateMetrics() instead. + UpdateLevel(&aec->linoutlevel, ef); + } + + // Scale error signal inversely with far power. + WebRtcAec_ScaleErrorSignal(aec, ef); + WebRtcAec_FilterAdaptation(aec, fft, ef); + NonLinearProcessing(aec, output, outputH); + + if (aec->metricsMode == 1) { + // Update power levels and echo metrics + UpdateLevel(&aec->farlevel, (float(*)[PART_LEN1])xf_ptr); + UpdateLevel(&aec->nearlevel, df); + UpdateMetrics(aec); + } + + // Store the output block. 
+ WebRtc_WriteBuffer(aec->outFrBuf, output, PART_LEN); + // For H band + if (aec->sampFreq == 32000) { + WebRtc_WriteBuffer(aec->outFrBufH, outputH, PART_LEN); + } #ifdef WEBRTC_AEC_DEBUG_DUMP - { - int16_t eInt16[PART_LEN]; - for (i = 0; i < PART_LEN; i++) { - eInt16[i] = (int16_t)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, e[i], - WEBRTC_SPL_WORD16_MIN); - } - - (void)fwrite(eInt16, sizeof(int16_t), PART_LEN, aec->outLinearFile); - (void)fwrite(output, sizeof(int16_t), PART_LEN, aec->outFile); + { + int16_t eInt16[PART_LEN]; + for (i = 0; i < PART_LEN; i++) { + eInt16[i] = (int16_t)WEBRTC_SPL_SAT( + WEBRTC_SPL_WORD16_MAX, e[i], WEBRTC_SPL_WORD16_MIN); } + + (void)fwrite(eInt16, sizeof(int16_t), PART_LEN, aec->outLinearFile); + (void)fwrite(output, sizeof(int16_t), PART_LEN, aec->outFile); + } #endif } -static void NonLinearProcessing(AecCore* aec, short *output, short *outputH) -{ - float efw[2][PART_LEN1], dfw[2][PART_LEN1], xfw[2][PART_LEN1]; - complex_t comfortNoiseHband[PART_LEN1]; - float fft[PART_LEN2]; - float scale, dtmp; - float nlpGainHband; - int i, j, pos; +static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) { + float efw[2][PART_LEN1], dfw[2][PART_LEN1], xfw[2][PART_LEN1]; + complex_t comfortNoiseHband[PART_LEN1]; + float fft[PART_LEN2]; + float scale, dtmp; + float nlpGainHband; + int i, j, pos; - // Coherence and non-linear filter - float cohde[PART_LEN1], cohxd[PART_LEN1]; - float hNlDeAvg, hNlXdAvg; - float hNl[PART_LEN1]; - float hNlPref[kPrefBandSize]; - float hNlFb = 0, hNlFbLow = 0; - const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f; - const int prefBandSize = kPrefBandSize / aec->mult; - const int minPrefBand = 4 / aec->mult; + // Coherence and non-linear filter + float cohde[PART_LEN1], cohxd[PART_LEN1]; + float hNlDeAvg, hNlXdAvg; + float hNl[PART_LEN1]; + float hNlPref[kPrefBandSize]; + float hNlFb = 0, hNlFbLow = 0; + const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f; + const int prefBandSize = kPrefBandSize / aec->mult; + const int minPrefBand = 4 / aec->mult; - // Near and error power sums - float sdSum = 0, seSum = 0; + // Near and error power sums + float sdSum = 0, seSum = 0; - // Power estimate smoothing coefficients. - const float *ptrGCoh = aec->extended_filter_enabled ? - kExtendedSmoothingCoefficients[aec->mult - 1] : - kNormalSmoothingCoefficients[aec->mult - 1]; - const float* min_overdrive = aec->extended_filter_enabled ? - kExtendedMinOverDrive : kNormalMinOverDrive; + // Power estimate smoothing coefficients. + const float* ptrGCoh = aec->extended_filter_enabled + ? kExtendedSmoothingCoefficients[aec->mult - 1] + : kNormalSmoothingCoefficients[aec->mult - 1]; + const float* min_overdrive = aec->extended_filter_enabled + ? kExtendedMinOverDrive + : kNormalMinOverDrive; - // Filter energy - float wfEnMax = 0, wfEn = 0; - const int delayEstInterval = 10 * aec->mult; + // Filter energy + float wfEnMax = 0, wfEn = 0; + const int delayEstInterval = 10 * aec->mult; - float* xfw_ptr = NULL; + float* xfw_ptr = NULL; - aec->delayEstCtr++; - if (aec->delayEstCtr == delayEstInterval) { - aec->delayEstCtr = 0; + aec->delayEstCtr++; + if (aec->delayEstCtr == delayEstInterval) { + aec->delayEstCtr = 0; + } + + // initialize comfort noise for H band + memset(comfortNoiseHband, 0, sizeof(comfortNoiseHband)); + nlpGainHband = (float)0.0; + dtmp = (float)0.0; + + // Measure energy in each filter partition to determine delay. + // TODO: Spread by computing one partition per block? 
+ if (aec->delayEstCtr == 0) { + wfEnMax = 0; + aec->delayIdx = 0; + for (i = 0; i < aec->num_partitions; i++) { + pos = i * PART_LEN1; + wfEn = 0; + for (j = 0; j < PART_LEN1; j++) { + wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] + + aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j]; + } + + if (wfEn > wfEnMax) { + wfEnMax = wfEn; + aec->delayIdx = i; + } } + } - // initialize comfort noise for H band - memset(comfortNoiseHband, 0, sizeof(comfortNoiseHband)); - nlpGainHband = (float)0.0; - dtmp = (float)0.0; + // We should always have at least one element stored in |far_buf|. + assert(WebRtc_available_read(aec->far_buf_windowed) > 0); + // NLP + WebRtc_ReadBuffer(aec->far_buf_windowed, (void**)&xfw_ptr, &xfw[0][0], 1); - // Measure energy in each filter partition to determine delay. - // TODO: Spread by computing one partition per block? - if (aec->delayEstCtr == 0) { - wfEnMax = 0; - aec->delayIdx = 0; - for (i = 0; i < aec->num_partitions; i++) { - pos = i * PART_LEN1; - wfEn = 0; - for (j = 0; j < PART_LEN1; j++) { - wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] + - aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j]; - } + // TODO(bjornv): Investigate if we can reuse |far_buf_windowed| instead of + // |xfwBuf|. + // Buffer far. + memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1); - if (wfEn > wfEnMax) { - wfEnMax = wfEn; - aec->delayIdx = i; - } - } - } + // Use delayed far. + memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1, sizeof(xfw)); - // We should always have at least one element stored in |far_buf|. - assert(WebRtc_available_read(aec->far_buf_windowed) > 0); - // NLP - WebRtc_ReadBuffer(aec->far_buf_windowed, (void**) &xfw_ptr, &xfw[0][0], 1); + // Windowed near fft + for (i = 0; i < PART_LEN; i++) { + fft[i] = aec->dBuf[i] * sqrtHanning[i]; + fft[PART_LEN + i] = aec->dBuf[PART_LEN + i] * sqrtHanning[PART_LEN - i]; + } + aec_rdft_forward_128(fft); - // TODO(bjornv): Investigate if we can reuse |far_buf_windowed| instead of - // |xfwBuf|. - // Buffer far. - memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1); + dfw[1][0] = 0; + dfw[1][PART_LEN] = 0; + dfw[0][0] = fft[0]; + dfw[0][PART_LEN] = fft[1]; + for (i = 1; i < PART_LEN; i++) { + dfw[0][i] = fft[2 * i]; + dfw[1][i] = fft[2 * i + 1]; + } - // Use delayed far. 
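// The partition-energy search above re-picks the NLP's bulk delay every
// delayEstInterval (= 10 * mult) blocks as
//   delayIdx = argmax_i sum_j |wfBuf_i[j]|^2,
// i.e. the adaptive filter partition holding the most energy, and the
// coherence computation below then reads the far-end spectrum delayed by that
// many partitions (xfwBuf + delayIdx * PART_LEN1).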
- memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1, sizeof(xfw)); + // Windowed error fft + for (i = 0; i < PART_LEN; i++) { + fft[i] = aec->eBuf[i] * sqrtHanning[i]; + fft[PART_LEN + i] = aec->eBuf[PART_LEN + i] * sqrtHanning[PART_LEN - i]; + } + aec_rdft_forward_128(fft); + efw[1][0] = 0; + efw[1][PART_LEN] = 0; + efw[0][0] = fft[0]; + efw[0][PART_LEN] = fft[1]; + for (i = 1; i < PART_LEN; i++) { + efw[0][i] = fft[2 * i]; + efw[1][i] = fft[2 * i + 1]; + } - // Windowed near fft - for (i = 0; i < PART_LEN; i++) { - fft[i] = aec->dBuf[i] * sqrtHanning[i]; - fft[PART_LEN + i] = aec->dBuf[PART_LEN + i] * sqrtHanning[PART_LEN - i]; - } - aec_rdft_forward_128(fft); - - dfw[1][0] = 0; - dfw[1][PART_LEN] = 0; - dfw[0][0] = fft[0]; - dfw[0][PART_LEN] = fft[1]; - for (i = 1; i < PART_LEN; i++) { - dfw[0][i] = fft[2 * i]; - dfw[1][i] = fft[2 * i + 1]; - } - - // Windowed error fft - for (i = 0; i < PART_LEN; i++) { - fft[i] = aec->eBuf[i] * sqrtHanning[i]; - fft[PART_LEN + i] = aec->eBuf[PART_LEN + i] * sqrtHanning[PART_LEN - i]; - } - aec_rdft_forward_128(fft); - efw[1][0] = 0; - efw[1][PART_LEN] = 0; - efw[0][0] = fft[0]; - efw[0][PART_LEN] = fft[1]; - for (i = 1; i < PART_LEN; i++) { - efw[0][i] = fft[2 * i]; - efw[1][i] = fft[2 * i + 1]; - } - - // Smoothed PSD - for (i = 0; i < PART_LEN1; i++) { - aec->sd[i] = ptrGCoh[0] * aec->sd[i] + ptrGCoh[1] * - (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]); - aec->se[i] = ptrGCoh[0] * aec->se[i] + ptrGCoh[1] * - (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]); - // We threshold here to protect against the ill-effects of a zero farend. - // The threshold is not arbitrarily chosen, but balances protection and - // adverse interaction with the algorithm's tuning. - // TODO: investigate further why this is so sensitive. - aec->sx[i] = ptrGCoh[0] * aec->sx[i] + ptrGCoh[1] * + // Smoothed PSD + for (i = 0; i < PART_LEN1; i++) { + aec->sd[i] = ptrGCoh[0] * aec->sd[i] + + ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]); + aec->se[i] = ptrGCoh[0] * aec->se[i] + + ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]); + // We threshold here to protect against the ill-effects of a zero farend. + // The threshold is not arbitrarily chosen, but balances protection and + // adverse interaction with the algorithm's tuning. + // TODO: investigate further why this is so sensitive. + aec->sx[i] = + ptrGCoh[0] * aec->sx[i] + + ptrGCoh[1] * WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i], 15); - aec->sde[i][0] = ptrGCoh[0] * aec->sde[i][0] + ptrGCoh[1] * - (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]); - aec->sde[i][1] = ptrGCoh[0] * aec->sde[i][1] + ptrGCoh[1] * - (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]); + aec->sde[i][0] = + ptrGCoh[0] * aec->sde[i][0] + + ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]); + aec->sde[i][1] = + ptrGCoh[0] * aec->sde[i][1] + + ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]); - aec->sxd[i][0] = ptrGCoh[0] * aec->sxd[i][0] + ptrGCoh[1] * - (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]); - aec->sxd[i][1] = ptrGCoh[0] * aec->sxd[i][1] + ptrGCoh[1] * - (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]); + aec->sxd[i][0] = + ptrGCoh[0] * aec->sxd[i][0] + + ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]); + aec->sxd[i][1] = + ptrGCoh[0] * aec->sxd[i][1] + + ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]); - sdSum += aec->sd[i]; - seSum += aec->se[i]; + sdSum += aec->sd[i]; + seSum += aec->se[i]; + } + + // Divergent filter safeguard. 
+ if (aec->divergeState == 0) { + if (seSum > sdSum) { + aec->divergeState = 1; } - - // Divergent filter safeguard. - if (aec->divergeState == 0) { - if (seSum > sdSum) { - aec->divergeState = 1; - } - } - else { - if (seSum * 1.05f < sdSum) { - aec->divergeState = 0; - } + } else { + if (seSum * 1.05f < sdSum) { + aec->divergeState = 0; } + } - if (aec->divergeState == 1) { - memcpy(efw, dfw, sizeof(efw)); - } + if (aec->divergeState == 1) { + memcpy(efw, dfw, sizeof(efw)); + } + if (!aec->extended_filter_enabled) { // Reset if error is significantly larger than nearend (13 dB). if (seSum > (19.95f * sdSum)) { - memset(aec->wfBuf, 0, sizeof(aec->wfBuf)); + memset(aec->wfBuf, 0, sizeof(aec->wfBuf)); + } + } + + // Subband coherence + for (i = 0; i < PART_LEN1; i++) { + cohde[i] = + (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) / + (aec->sd[i] * aec->se[i] + 1e-10f); + cohxd[i] = + (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) / + (aec->sx[i] * aec->sd[i] + 1e-10f); + } + + hNlXdAvg = 0; + for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) { + hNlXdAvg += cohxd[i]; + } + hNlXdAvg /= prefBandSize; + hNlXdAvg = 1 - hNlXdAvg; + + hNlDeAvg = 0; + for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) { + hNlDeAvg += cohde[i]; + } + hNlDeAvg /= prefBandSize; + + if (hNlXdAvg < 0.75f && hNlXdAvg < aec->hNlXdAvgMin) { + aec->hNlXdAvgMin = hNlXdAvg; + } + + if (hNlDeAvg > 0.98f && hNlXdAvg > 0.9f) { + aec->stNearState = 1; + } else if (hNlDeAvg < 0.95f || hNlXdAvg < 0.8f) { + aec->stNearState = 0; + } + + if (aec->hNlXdAvgMin == 1) { + aec->echoState = 0; + aec->overDrive = min_overdrive[aec->nlp_mode]; + + if (aec->stNearState == 1) { + memcpy(hNl, cohde, sizeof(hNl)); + hNlFb = hNlDeAvg; + hNlFbLow = hNlDeAvg; + } else { + for (i = 0; i < PART_LEN1; i++) { + hNl[i] = 1 - cohxd[i]; + } + hNlFb = hNlXdAvg; + hNlFbLow = hNlXdAvg; + } + } else { + + if (aec->stNearState == 1) { + aec->echoState = 0; + memcpy(hNl, cohde, sizeof(hNl)); + hNlFb = hNlDeAvg; + hNlFbLow = hNlDeAvg; + } else { + aec->echoState = 1; + for (i = 0; i < PART_LEN1; i++) { + hNl[i] = WEBRTC_SPL_MIN(cohde[i], 1 - cohxd[i]); + } + + // Select an order statistic from the preferred bands. + // TODO: Using quicksort now, but a selection algorithm may be preferred. + memcpy(hNlPref, &hNl[minPrefBand], sizeof(float) * prefBandSize); + qsort(hNlPref, prefBandSize, sizeof(float), CmpFloat); + hNlFb = hNlPref[(int)floor(prefBandQuant * (prefBandSize - 1))]; + hNlFbLow = hNlPref[(int)floor(prefBandQuantLow * (prefBandSize - 1))]; + } + } + + // Track the local filter minimum to determine suppression overdrive. + if (hNlFbLow < 0.6f && hNlFbLow < aec->hNlFbLocalMin) { + aec->hNlFbLocalMin = hNlFbLow; + aec->hNlFbMin = hNlFbLow; + aec->hNlNewMin = 1; + aec->hNlMinCtr = 0; + } + aec->hNlFbLocalMin = + WEBRTC_SPL_MIN(aec->hNlFbLocalMin + 0.0008f / aec->mult, 1); + aec->hNlXdAvgMin = WEBRTC_SPL_MIN(aec->hNlXdAvgMin + 0.0006f / aec->mult, 1); + + if (aec->hNlNewMin == 1) { + aec->hNlMinCtr++; + } + if (aec->hNlMinCtr == 2) { + aec->hNlNewMin = 0; + aec->hNlMinCtr = 0; + aec->overDrive = + WEBRTC_SPL_MAX(kTargetSupp[aec->nlp_mode] / + ((float)log(aec->hNlFbMin + 1e-10f) + 1e-10f), + min_overdrive[aec->nlp_mode]); + } + + // Smooth the overdrive. 
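// For reference: |cohde| and |cohxd| above are the standard
// magnitude-squared coherence, C_xy(k) = |S_xy(k)|^2 / (S_xx(k) * S_yy(k)),
// regularized so a zero spectrum cannot divide by zero. Values near 1 mean
// the two signals are linearly related in that bin, which is why high
// near/error coherence marks nearend speech and high far/near coherence
// marks echo. A sketch with illustrative names:
static void MsCoherence(const float sxy[][2],
                        const float* sxx,
                        const float* syy,
                        int n,
                        float* coh) {
  int i;
  for (i = 0; i < n; i++) {
    coh[i] = (sxy[i][0] * sxy[i][0] + sxy[i][1] * sxy[i][1]) /
             (sxx[i] * syy[i] + 1e-10f);
  }
}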
+ if (aec->overDrive < aec->overDriveSm) { + aec->overDriveSm = 0.99f * aec->overDriveSm + 0.01f * aec->overDrive; + } else { + aec->overDriveSm = 0.9f * aec->overDriveSm + 0.1f * aec->overDrive; + } + + WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw); + + // Add comfort noise. + ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl); + + // TODO(bjornv): Investigate how to take the windowing below into account if + // needed. + if (aec->metricsMode == 1) { + // Note that we have a scaling by two in the time domain |eBuf|. + // In addition the time domain signal is windowed before transformation, + // losing half the energy on the average. We take care of the first + // scaling only in UpdateMetrics(). + UpdateLevel(&aec->nlpoutlevel, efw); + } + // Inverse error fft. + fft[0] = efw[0][0]; + fft[1] = efw[0][PART_LEN]; + for (i = 1; i < PART_LEN; i++) { + fft[2 * i] = efw[0][i]; + // Sign change required by Ooura fft. + fft[2 * i + 1] = -efw[1][i]; + } + aec_rdft_inverse_128(fft); + + // Overlap and add to obtain output. + scale = 2.0f / PART_LEN2; + for (i = 0; i < PART_LEN; i++) { + fft[i] *= scale; // fft scaling + fft[i] = fft[i] * sqrtHanning[i] + aec->outBuf[i]; + + // Saturation protection + output[i] = (short)WEBRTC_SPL_SAT( + WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN); + + fft[PART_LEN + i] *= scale; // fft scaling + aec->outBuf[i] = fft[PART_LEN + i] * sqrtHanning[PART_LEN - i]; + } + + // For H band + if (aec->sampFreq == 32000) { + + // H band gain + // average nlp over low band: average over second half of freq spectrum + // (4->8khz) + GetHighbandGain(hNl, &nlpGainHband); + + // Inverse comfort_noise + if (flagHbandCn == 1) { + fft[0] = comfortNoiseHband[0][0]; + fft[1] = comfortNoiseHband[PART_LEN][0]; + for (i = 1; i < PART_LEN; i++) { + fft[2 * i] = comfortNoiseHband[i][0]; + fft[2 * i + 1] = comfortNoiseHband[i][1]; + } + aec_rdft_inverse_128(fft); + scale = 2.0f / PART_LEN2; } - // Subband coherence - for (i = 0; i < PART_LEN1; i++) { - cohde[i] = (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) / - (aec->sd[i] * aec->se[i] + 1e-10f); - cohxd[i] = (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) / - (aec->sx[i] * aec->sd[i] + 1e-10f); - } - - hNlXdAvg = 0; - for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) { - hNlXdAvg += cohxd[i]; - } - hNlXdAvg /= prefBandSize; - hNlXdAvg = 1 - hNlXdAvg; - - hNlDeAvg = 0; - for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) { - hNlDeAvg += cohde[i]; - } - hNlDeAvg /= prefBandSize; - - if (hNlXdAvg < 0.75f && hNlXdAvg < aec->hNlXdAvgMin) { - aec->hNlXdAvgMin = hNlXdAvg; - } - - if (hNlDeAvg > 0.98f && hNlXdAvg > 0.9f) { - aec->stNearState = 1; - } - else if (hNlDeAvg < 0.95f || hNlXdAvg < 0.8f) { - aec->stNearState = 0; - } - - if (aec->hNlXdAvgMin == 1) { - aec->echoState = 0; - aec->overDrive = min_overdrive[aec->nlp_mode]; - - if (aec->stNearState == 1) { - memcpy(hNl, cohde, sizeof(hNl)); - hNlFb = hNlDeAvg; - hNlFbLow = hNlDeAvg; - } - else { - for (i = 0; i < PART_LEN1; i++) { - hNl[i] = 1 - cohxd[i]; - } - hNlFb = hNlXdAvg; - hNlFbLow = hNlXdAvg; - } - } - else { - - if (aec->stNearState == 1) { - aec->echoState = 0; - memcpy(hNl, cohde, sizeof(hNl)); - hNlFb = hNlDeAvg; - hNlFbLow = hNlDeAvg; - } - else { - aec->echoState = 1; - for (i = 0; i < PART_LEN1; i++) { - hNl[i] = WEBRTC_SPL_MIN(cohde[i], 1 - cohxd[i]); - } - - // Select an order statistic from the preferred bands. - // TODO: Using quicksort now, but a selection algorithm may be preferred. 
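// For reference: the synthesis above is 50%-overlap overlap-add. The same
// sqrt-Hanning window is applied at analysis and at synthesis, so the
// squared windows of adjacent half-overlapped blocks sum to one and the
// output is reconstructed without amplitude modulation. A minimal sketch,
// assuming a 2*N-sample time block from the inverse FFT (names
// illustrative):
#define N 64  /* PART_LEN */

static void OverlapAdd(const float* block,     /* 2*N samples, pre-scaled */
                       const float* sqrt_hann, /* N + 1 taps */
                       float* overlap,         /* N samples kept per call */
                       short* out) {
  int i;
  for (i = 0; i < N; i++) {
    float v = block[i] * sqrt_hann[i] + overlap[i];
    /* Saturation protection, as with WEBRTC_SPL_SAT above. */
    if (v > 32767.0f) v = 32767.0f;
    if (v < -32768.0f) v = -32768.0f;
    out[i] = (short)v;
    /* The windowed second half becomes the next block's overlap. */
    overlap[i] = block[N + i] * sqrt_hann[N - i];
  }
}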
- memcpy(hNlPref, &hNl[minPrefBand], sizeof(float) * prefBandSize); - qsort(hNlPref, prefBandSize, sizeof(float), CmpFloat); - hNlFb = hNlPref[(int)floor(prefBandQuant * (prefBandSize - 1))]; - hNlFbLow = hNlPref[(int)floor(prefBandQuantLow * (prefBandSize - 1))]; - } - } - - // Track the local filter minimum to determine suppression overdrive. - if (hNlFbLow < 0.6f && hNlFbLow < aec->hNlFbLocalMin) { - aec->hNlFbLocalMin = hNlFbLow; - aec->hNlFbMin = hNlFbLow; - aec->hNlNewMin = 1; - aec->hNlMinCtr = 0; - } - aec->hNlFbLocalMin = WEBRTC_SPL_MIN(aec->hNlFbLocalMin + 0.0008f / aec->mult, 1); - aec->hNlXdAvgMin = WEBRTC_SPL_MIN(aec->hNlXdAvgMin + 0.0006f / aec->mult, 1); - - if (aec->hNlNewMin == 1) { - aec->hNlMinCtr++; - } - if (aec->hNlMinCtr == 2) { - aec->hNlNewMin = 0; - aec->hNlMinCtr = 0; - aec->overDrive = WEBRTC_SPL_MAX(kTargetSupp[aec->nlp_mode] / - ((float)log(aec->hNlFbMin + 1e-10f) + 1e-10f), - min_overdrive[aec->nlp_mode]); - } - - // Smooth the overdrive. - if (aec->overDrive < aec->overDriveSm) { - aec->overDriveSm = 0.99f * aec->overDriveSm + 0.01f * aec->overDrive; - } - else { - aec->overDriveSm = 0.9f * aec->overDriveSm + 0.1f * aec->overDrive; - } - - WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw); - - // Add comfort noise. - ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl); - - // TODO(bjornv): Investigate how to take the windowing below into account if - // needed. - if (aec->metricsMode == 1) { - // Note that we have a scaling by two in the time domain |eBuf|. - // In addition the time domain signal is windowed before transformation, - // losing half the energy on the average. We take care of the first - // scaling only in UpdateMetrics(). - UpdateLevel(&aec->nlpoutlevel, efw); - } - // Inverse error fft. - fft[0] = efw[0][0]; - fft[1] = efw[0][PART_LEN]; - for (i = 1; i < PART_LEN; i++) { - fft[2*i] = efw[0][i]; - // Sign change required by Ooura fft. - fft[2*i + 1] = -efw[1][i]; - } - aec_rdft_inverse_128(fft); - - // Overlap and add to obtain output. - scale = 2.0f / PART_LEN2; + // compute gain factor for (i = 0; i < PART_LEN; i++) { - fft[i] *= scale; // fft scaling - fft[i] = fft[i]*sqrtHanning[i] + aec->outBuf[i]; + dtmp = (float)aec->dBufH[i]; + dtmp = (float)dtmp * nlpGainHband; // for variable gain - // Saturation protection - output[i] = (short)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, fft[i], - WEBRTC_SPL_WORD16_MIN); + // add some comfort noise where Hband is attenuated + if (flagHbandCn == 1) { + fft[i] *= scale; // fft scaling + dtmp += cnScaleHband * fft[i]; + } - fft[PART_LEN + i] *= scale; // fft scaling - aec->outBuf[i] = fft[PART_LEN + i] * sqrtHanning[PART_LEN - i]; + // Saturation protection + outputH[i] = (short)WEBRTC_SPL_SAT( + WEBRTC_SPL_WORD16_MAX, dtmp, WEBRTC_SPL_WORD16_MIN); } + } - // For H band - if (aec->sampFreq == 32000) { + // Copy the current block to the old position. 
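// For reference: the H-band gain above is a single scalar, the mean of the
// NLP gains over the upper half of the low-band spectrum (roughly the
// 4->8 kHz content), applied uniformly to the time-domain high band. A
// sketch of the same average as in GetHighbandGain(), returning the value
// instead of writing through a pointer (names illustrative):
static float HighbandGain(const float* lambda, int first_bin, int num_bins) {
  float gain = 0.0f;
  int i;
  for (i = first_bin; i < num_bins - 1; i++) {
    gain += lambda[i];
  }
  return gain / (float)(num_bins - 1 - first_bin);
}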
+ memcpy(aec->dBuf, aec->dBuf + PART_LEN, sizeof(float) * PART_LEN); + memcpy(aec->eBuf, aec->eBuf + PART_LEN, sizeof(float) * PART_LEN); - // H band gain - // average nlp over low band: average over second half of freq spectrum - // (4->8khz) - GetHighbandGain(hNl, &nlpGainHband); + // Copy the current block to the old position for H band + if (aec->sampFreq == 32000) { + memcpy(aec->dBufH, aec->dBufH + PART_LEN, sizeof(float) * PART_LEN); + } - // Inverse comfort_noise - if (flagHbandCn == 1) { - fft[0] = comfortNoiseHband[0][0]; - fft[1] = comfortNoiseHband[PART_LEN][0]; - for (i = 1; i < PART_LEN; i++) { - fft[2*i] = comfortNoiseHband[i][0]; - fft[2*i + 1] = comfortNoiseHband[i][1]; - } - aec_rdft_inverse_128(fft); - scale = 2.0f / PART_LEN2; - } - - // compute gain factor - for (i = 0; i < PART_LEN; i++) { - dtmp = (float)aec->dBufH[i]; - dtmp = (float)dtmp * nlpGainHband; // for variable gain - - // add some comfort noise where Hband is attenuated - if (flagHbandCn == 1) { - fft[i] *= scale; // fft scaling - dtmp += cnScaleHband * fft[i]; - } - - // Saturation protection - outputH[i] = (short)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, dtmp, - WEBRTC_SPL_WORD16_MIN); - } - } - - // Copy the current block to the old position. - memcpy(aec->dBuf, aec->dBuf + PART_LEN, sizeof(float) * PART_LEN); - memcpy(aec->eBuf, aec->eBuf + PART_LEN, sizeof(float) * PART_LEN); - - // Copy the current block to the old position for H band - if (aec->sampFreq == 32000) { - memcpy(aec->dBufH, aec->dBufH + PART_LEN, sizeof(float) * PART_LEN); - } - - memmove(aec->xfwBuf + PART_LEN1, aec->xfwBuf, sizeof(aec->xfwBuf) - - sizeof(complex_t) * PART_LEN1); + memmove(aec->xfwBuf + PART_LEN1, + aec->xfwBuf, + sizeof(aec->xfwBuf) - sizeof(complex_t) * PART_LEN1); } -static void GetHighbandGain(const float *lambda, float *nlpGainHband) -{ - int i; +static void GetHighbandGain(const float* lambda, float* nlpGainHband) { + int i; - nlpGainHband[0] = (float)0.0; - for (i = freqAvgIc; i < PART_LEN1 - 1; i++) { - nlpGainHband[0] += lambda[i]; - } - nlpGainHband[0] /= (float)(PART_LEN1 - 1 - freqAvgIc); + nlpGainHband[0] = (float)0.0; + for (i = freqAvgIc; i < PART_LEN1 - 1; i++) { + nlpGainHband[0] += lambda[i]; + } + nlpGainHband[0] /= (float)(PART_LEN1 - 1 - freqAvgIc); } -static void ComfortNoise(AecCore* aec, float efw[2][PART_LEN1], - complex_t *comfortNoiseHband, const float *noisePow, const float *lambda) -{ - int i, num; - float rand[PART_LEN]; - float noise, noiseAvg, tmp, tmpAvg; - int16_t randW16[PART_LEN]; - complex_t u[PART_LEN1]; +static void ComfortNoise(AecCore* aec, + float efw[2][PART_LEN1], + complex_t* comfortNoiseHband, + const float* noisePow, + const float* lambda) { + int i, num; + float rand[PART_LEN]; + float noise, noiseAvg, tmp, tmpAvg; + int16_t randW16[PART_LEN]; + complex_t u[PART_LEN1]; - const float pi2 = 6.28318530717959f; + const float pi2 = 6.28318530717959f; - // Generate a uniform random array on [0 1] - WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed); - for (i = 0; i < PART_LEN; i++) { - rand[i] = ((float)randW16[i]) / 32768; + // Generate a uniform random array on [0 1] + WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed); + for (i = 0; i < PART_LEN; i++) { + rand[i] = ((float)randW16[i]) / 32768; + } + + // Reject LF noise + u[0][0] = 0; + u[0][1] = 0; + for (i = 1; i < PART_LEN1; i++) { + tmp = pi2 * rand[i - 1]; + + noise = sqrtf(noisePow[i]); + u[i][0] = noise * cosf(tmp); + u[i][1] = -noise * sinf(tmp); + } + u[PART_LEN][1] = 0; + + for (i = 0; i < PART_LEN1; i++) { + // This 
is the proper weighting to match the background noise power + tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0)); + // tmp = 1 - lambda[i]; + efw[0][i] += tmp * u[i][0]; + efw[1][i] += tmp * u[i][1]; + } + + // For H band comfort noise + // TODO: don't compute noise and "tmp" twice. Use the previous results. + noiseAvg = 0.0; + tmpAvg = 0.0; + num = 0; + if (aec->sampFreq == 32000 && flagHbandCn == 1) { + + // average noise scale + // average over second half of freq spectrum (i.e., 4->8khz) + // TODO: we shouldn't need num. We know how many elements we're summing. + for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) { + num++; + noiseAvg += sqrtf(noisePow[i]); } + noiseAvg /= (float)num; + // average nlp scale + // average over second half of freq spectrum (i.e., 4->8khz) + // TODO: we shouldn't need num. We know how many elements we're summing. + num = 0; + for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) { + num++; + tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0)); + } + tmpAvg /= (float)num; + + // Use average noise for H band + // TODO: we should probably have a new random vector here. // Reject LF noise u[0][0] = 0; u[0][1] = 0; for (i = 1; i < PART_LEN1; i++) { - tmp = pi2 * rand[i - 1]; + tmp = pi2 * rand[i - 1]; - noise = sqrtf(noisePow[i]); - u[i][0] = noise * cosf(tmp); - u[i][1] = -noise * sinf(tmp); + // Use average noise for H band + u[i][0] = noiseAvg * (float)cos(tmp); + u[i][1] = -noiseAvg * (float)sin(tmp); } u[PART_LEN][1] = 0; for (i = 0; i < PART_LEN1; i++) { - // This is the proper weighting to match the background noise power - tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0)); - //tmp = 1 - lambda[i]; - efw[0][i] += tmp * u[i][0]; - efw[1][i] += tmp * u[i][1]; - } - - // For H band comfort noise - // TODO: don't compute noise and "tmp" twice. Use the previous results. - noiseAvg = 0.0; - tmpAvg = 0.0; - num = 0; - if (aec->sampFreq == 32000 && flagHbandCn == 1) { - - // average noise scale - // average over second half of freq spectrum (i.e., 4->8khz) - // TODO: we shouldn't need num. We know how many elements we're summing. - for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) { - num++; - noiseAvg += sqrtf(noisePow[i]); - } - noiseAvg /= (float)num; - - // average nlp scale - // average over second half of freq spectrum (i.e., 4->8khz) - // TODO: we shouldn't need num. We know how many elements we're summing. - num = 0; - for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) { - num++; - tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0)); - } - tmpAvg /= (float)num; - - // Use average noise for H band - // TODO: we should probably have a new random vector here. 
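// For reference: the comfort noise above gives each bin a random phase and
// a magnitude taken from the estimated noise floor, mixed in with weight
// sqrt(1 - lambda^2). Since the suppressor passes background noise with
// power lambda^2 * noisePow, this weighting restores the total to noisePow,
// so the noise level does not pump as suppression varies. A minimal sketch
// (names illustrative):
#include <math.h>

static void MixComfortNoise(float* e_re,
                            float* e_im,
                            const float* noise_pow,
                            const float* lambda,
                            const float* phase, /* uniform on [0, 2*pi) */
                            int n) {
  int i;
  /* The DC bin gets no noise above (u[0] is zeroed), so start at 1. */
  for (i = 1; i < n; i++) {
    const float mag = sqrtf(noise_pow[i]);
    float w = 1.0f - lambda[i] * lambda[i];
    w = sqrtf(w > 0.0f ? w : 0.0f); /* Power-matching weight. */
    e_re[i] += w * mag * cosf(phase[i]);
    e_im[i] += w * -mag * sinf(phase[i]);
  }
}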
- // Reject LF noise - u[0][0] = 0; - u[0][1] = 0; - for (i = 1; i < PART_LEN1; i++) { - tmp = pi2 * rand[i - 1]; - - // Use average noise for H band - u[i][0] = noiseAvg * (float)cos(tmp); - u[i][1] = -noiseAvg * (float)sin(tmp); - } - u[PART_LEN][1] = 0; - - for (i = 0; i < PART_LEN1; i++) { - // Use average NLP weight for H band - comfortNoiseHband[i][0] = tmpAvg * u[i][0]; - comfortNoiseHband[i][1] = tmpAvg * u[i][1]; - } + // Use average NLP weight for H band + comfortNoiseHband[i][0] = tmpAvg * u[i][0]; + comfortNoiseHband[i][1] = tmpAvg * u[i][1]; } + } } static void InitLevel(PowerLevel* level) { @@ -1562,130 +1558,132 @@ static void UpdateLevel(PowerLevel* level, float in[2][PART_LEN1]) { } } -static void UpdateMetrics(AecCore* aec) -{ - float dtmp, dtmp2; +static void UpdateMetrics(AecCore* aec) { + float dtmp, dtmp2; - const float actThresholdNoisy = 8.0f; - const float actThresholdClean = 40.0f; - const float safety = 0.99995f; - const float noisyPower = 300000.0f; + const float actThresholdNoisy = 8.0f; + const float actThresholdClean = 40.0f; + const float safety = 0.99995f; + const float noisyPower = 300000.0f; - float actThreshold; - float echo, suppressedEcho; + float actThreshold; + float echo, suppressedEcho; - if (aec->echoState) { // Check if echo is likely present - aec->stateCounter++; + if (aec->echoState) { // Check if echo is likely present + aec->stateCounter++; + } + + if (aec->farlevel.frcounter == 0) { + + if (aec->farlevel.minlevel < noisyPower) { + actThreshold = actThresholdClean; + } else { + actThreshold = actThresholdNoisy; } - if (aec->farlevel.frcounter == 0) { + if ((aec->stateCounter > (0.5f * countLen * subCountLen)) && + (aec->farlevel.sfrcounter == 0) - if (aec->farlevel.minlevel < noisyPower) { - actThreshold = actThresholdClean; - } - else { - actThreshold = actThresholdNoisy; - } + // Estimate in active far-end segments only + && + (aec->farlevel.averagelevel > + (actThreshold * aec->farlevel.minlevel))) { - if ((aec->stateCounter > (0.5f * countLen * subCountLen)) - && (aec->farlevel.sfrcounter == 0) + // Subtract noise power + echo = aec->nearlevel.averagelevel - safety * aec->nearlevel.minlevel; - // Estimate in active far-end segments only - && (aec->farlevel.averagelevel > (actThreshold * aec->farlevel.minlevel)) - ) { + // ERL + dtmp = 10 * (float)log10(aec->farlevel.averagelevel / + aec->nearlevel.averagelevel + + 1e-10f); + dtmp2 = 10 * (float)log10(aec->farlevel.averagelevel / echo + 1e-10f); - // Subtract noise power - echo = aec->nearlevel.averagelevel - safety * aec->nearlevel.minlevel; + aec->erl.instant = dtmp; + if (dtmp > aec->erl.max) { + aec->erl.max = dtmp; + } - // ERL - dtmp = 10 * (float)log10(aec->farlevel.averagelevel / - aec->nearlevel.averagelevel + 1e-10f); - dtmp2 = 10 * (float)log10(aec->farlevel.averagelevel / echo + 1e-10f); + if (dtmp < aec->erl.min) { + aec->erl.min = dtmp; + } - aec->erl.instant = dtmp; - if (dtmp > aec->erl.max) { - aec->erl.max = dtmp; - } + aec->erl.counter++; + aec->erl.sum += dtmp; + aec->erl.average = aec->erl.sum / aec->erl.counter; - if (dtmp < aec->erl.min) { - aec->erl.min = dtmp; - } + // Upper mean + if (dtmp > aec->erl.average) { + aec->erl.hicounter++; + aec->erl.hisum += dtmp; + aec->erl.himean = aec->erl.hisum / aec->erl.hicounter; + } - aec->erl.counter++; - aec->erl.sum += dtmp; - aec->erl.average = aec->erl.sum / aec->erl.counter; + // A_NLP + dtmp = 10 * (float)log10(aec->nearlevel.averagelevel / + (2 * aec->linoutlevel.averagelevel) + + 1e-10f); - // Upper mean - if (dtmp > 
aec->erl.average) { - aec->erl.hicounter++; - aec->erl.hisum += dtmp; - aec->erl.himean = aec->erl.hisum / aec->erl.hicounter; - } + // subtract noise power + suppressedEcho = 2 * (aec->linoutlevel.averagelevel - + safety * aec->linoutlevel.minlevel); - // A_NLP - dtmp = 10 * (float)log10(aec->nearlevel.averagelevel / - (2 * aec->linoutlevel.averagelevel) + 1e-10f); + dtmp2 = 10 * (float)log10(echo / suppressedEcho + 1e-10f); - // subtract noise power - suppressedEcho = 2 * (aec->linoutlevel.averagelevel - - safety * aec->linoutlevel.minlevel); + aec->aNlp.instant = dtmp2; + if (dtmp > aec->aNlp.max) { + aec->aNlp.max = dtmp; + } - dtmp2 = 10 * (float)log10(echo / suppressedEcho + 1e-10f); + if (dtmp < aec->aNlp.min) { + aec->aNlp.min = dtmp; + } - aec->aNlp.instant = dtmp2; - if (dtmp > aec->aNlp.max) { - aec->aNlp.max = dtmp; - } + aec->aNlp.counter++; + aec->aNlp.sum += dtmp; + aec->aNlp.average = aec->aNlp.sum / aec->aNlp.counter; - if (dtmp < aec->aNlp.min) { - aec->aNlp.min = dtmp; - } + // Upper mean + if (dtmp > aec->aNlp.average) { + aec->aNlp.hicounter++; + aec->aNlp.hisum += dtmp; + aec->aNlp.himean = aec->aNlp.hisum / aec->aNlp.hicounter; + } - aec->aNlp.counter++; - aec->aNlp.sum += dtmp; - aec->aNlp.average = aec->aNlp.sum / aec->aNlp.counter; + // ERLE - // Upper mean - if (dtmp > aec->aNlp.average) { - aec->aNlp.hicounter++; - aec->aNlp.hisum += dtmp; - aec->aNlp.himean = aec->aNlp.hisum / aec->aNlp.hicounter; - } + // subtract noise power + suppressedEcho = 2 * (aec->nlpoutlevel.averagelevel - + safety * aec->nlpoutlevel.minlevel); - // ERLE + dtmp = 10 * (float)log10(aec->nearlevel.averagelevel / + (2 * aec->nlpoutlevel.averagelevel) + + 1e-10f); + dtmp2 = 10 * (float)log10(echo / suppressedEcho + 1e-10f); - // subtract noise power - suppressedEcho = 2 * (aec->nlpoutlevel.averagelevel - - safety * aec->nlpoutlevel.minlevel); + dtmp = dtmp2; + aec->erle.instant = dtmp; + if (dtmp > aec->erle.max) { + aec->erle.max = dtmp; + } - dtmp = 10 * (float)log10(aec->nearlevel.averagelevel / - (2 * aec->nlpoutlevel.averagelevel) + 1e-10f); - dtmp2 = 10 * (float)log10(echo / suppressedEcho + 1e-10f); + if (dtmp < aec->erle.min) { + aec->erle.min = dtmp; + } - dtmp = dtmp2; - aec->erle.instant = dtmp; - if (dtmp > aec->erle.max) { - aec->erle.max = dtmp; - } + aec->erle.counter++; + aec->erle.sum += dtmp; + aec->erle.average = aec->erle.sum / aec->erle.counter; - if (dtmp < aec->erle.min) { - aec->erle.min = dtmp; - } - - aec->erle.counter++; - aec->erle.sum += dtmp; - aec->erle.average = aec->erle.sum / aec->erle.counter; - - // Upper mean - if (dtmp > aec->erle.average) { - aec->erle.hicounter++; - aec->erle.hisum += dtmp; - aec->erle.himean = aec->erle.hisum / aec->erle.hicounter; - } - } - - aec->stateCounter = 0; + // Upper mean + if (dtmp > aec->erle.average) { + aec->erle.hicounter++; + aec->erle.hisum += dtmp; + aec->erle.himean = aec->erle.hisum / aec->erle.hicounter; + } } + + aec->stateCounter = 0; + } } static void TimeToFrequency(float time_data[PART_LEN2], diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h index f83c37c8c4bf..d3c6d7e2b2e4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h @@ -18,14 +18,20 @@ #include "webrtc/typedefs.h" #define FRAME_LEN 80 -#define PART_LEN 64 // Length of partition +#define PART_LEN 64 // Length of partition #define PART_LEN1 (PART_LEN + 1) // 
Unique fft coefficients #define PART_LEN2 (PART_LEN * 2) // Length of partition * 2 // Delay estimator constants, used for logging. -enum { kMaxDelayBlocks = 60 }; -enum { kLookaheadBlocks = 15 }; -enum { kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks }; +enum { + kMaxDelayBlocks = 60 +}; +enum { + kLookaheadBlocks = 15 +}; +enum { + kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks +}; typedef float complex_t[2]; // For performance reasons, some arrays of complex numbers are replaced by twice @@ -37,7 +43,9 @@ typedef float complex_t[2]; // compile time. // Metrics -enum { kOffsetLevel = -100 }; +enum { + kOffsetLevel = -100 +}; typedef struct Stats { float instant; @@ -79,14 +87,18 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std); int WebRtcAec_echo_state(AecCore* self); // Gets statistics of the echo metrics ERL, ERLE, A_NLP. -void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle, +void WebRtcAec_GetEchoStats(AecCore* self, + Stats* erl, + Stats* erle, Stats* a_nlp); #ifdef WEBRTC_AEC_DEBUG_DUMP void* WebRtcAec_far_time_buf(AecCore* self); #endif // Sets local configuration modes. -void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode, +void WebRtcAec_SetConfigCore(AecCore* self, + int nlp_mode, + int metrics_mode, int delay_logging); // We now interpret delay correction to mean an extended filter length feature. diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h index fd0e7847494c..193369382ca0 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h @@ -19,12 +19,11 @@ #include "webrtc/modules/audio_processing/utility/ring_buffer.h" #include "webrtc/typedefs.h" -#define NR_PART 12 // Number of partitions in filter. -#define PREF_BAND_SIZE 24 - // Number of partitions for the extended filter mode. The first one is an enum // to be used in array declarations, as it represents the maximum filter length. -enum { kExtendedNumPartitions = 32 }; +enum { + kExtendedNumPartitions = 32 +}; static const int kNormalNumPartitions = 12; // Extended filter adaptation parameters. 
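// For reference: the partition count fixes how long an echo tail the filter
// can model, tail = partitions * PART_LEN / fs. Assuming the AEC core's
// 16 kHz band rate, kNormalNumPartitions = 12 covers
// 12 * 64 / 16000 = 48 ms, while kExtendedNumPartitions = 32 covers
// 32 * 64 / 16000 = 128 ms. A one-line helper (illustrative):
static float FilterTailMs(int partitions, int part_len, int band_rate_hz) {
  return 1000.0f * (float)(partitions * part_len) / (float)band_rate_hz;
}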
@@ -64,7 +63,7 @@ struct AecCore { float dPow[PART_LEN1]; float dMinPow[PART_LEN1]; float dInitMinPow[PART_LEN1]; - float *noisePow; + float* noisePow; float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft @@ -96,7 +95,7 @@ struct AecCore { int sampFreq; uint32_t seed; - float normal_mu; // stepsize + float normal_mu; // stepsize float normal_error_threshold; // error threshold int noiseEstCtr; @@ -114,8 +113,8 @@ struct AecCore { Stats rerl; // Quantities to control H band scaling for SWB input - int freq_avg_ic; // initial bin for averaging nlp gain - int flag_Hband_cn; // for comfort noise + int freq_avg_ic; // initial bin for averaging nlp gain + int flag_Hband_cn; // for comfort noise float cn_scale_Hband; // scale for comfort noise in H band int delay_histogram[kHistorySizeBlocks]; @@ -130,24 +129,26 @@ struct AecCore { #ifdef WEBRTC_AEC_DEBUG_DUMP RingBuffer* far_time_buf; - FILE *farFile; - FILE *nearFile; - FILE *outFile; - FILE *outLinearFile; + FILE* farFile; + FILE* nearFile; + FILE* outFile; + FILE* outLinearFile; #endif }; typedef void (*WebRtcAec_FilterFar_t)(AecCore* aec, float yf[2][PART_LEN1]); extern WebRtcAec_FilterFar_t WebRtcAec_FilterFar; -typedef void (*WebRtcAec_ScaleErrorSignal_t) - (AecCore* aec, float ef[2][PART_LEN1]); +typedef void (*WebRtcAec_ScaleErrorSignal_t)(AecCore* aec, + float ef[2][PART_LEN1]); extern WebRtcAec_ScaleErrorSignal_t WebRtcAec_ScaleErrorSignal; -typedef void (*WebRtcAec_FilterAdaptation_t) - (AecCore* aec, float *fft, float ef[2][PART_LEN1]); +typedef void (*WebRtcAec_FilterAdaptation_t)(AecCore* aec, + float* fft, + float ef[2][PART_LEN1]); extern WebRtcAec_FilterAdaptation_t WebRtcAec_FilterAdaptation; -typedef void (*WebRtcAec_OverdriveAndSuppress_t) - (AecCore* aec, float hNl[PART_LEN1], const float hNlFb, - float efw[2][PART_LEN1]); +typedef void (*WebRtcAec_OverdriveAndSuppress_t)(AecCore* aec, + float hNl[PART_LEN1], + const float hNlFb, + float efw[2][PART_LEN1]); extern WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress; #endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c index 61602a823421..e73bdd5f2c0e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c @@ -21,18 +21,15 @@ #include "webrtc/modules/audio_processing/aec/aec_core_internal.h" #include "webrtc/modules/audio_processing/aec/aec_rdft.h" -__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) -{ +__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) { return aRe * bRe - aIm * bIm; } -__inline static float MulIm(float aRe, float aIm, float bRe, float bIm) -{ +__inline static float MulIm(float aRe, float aIm, float bRe, float bIm) { return aRe * bIm + aIm * bRe; } -static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) -{ +static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) { int i; const int num_partitions = aec->num_partitions; for (i = 0; i < num_partitions; i++) { @@ -41,7 +38,7 @@ static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) int pos = i * PART_LEN1; // Check for wrap if (i + aec->xfBufBlockPos >= num_partitions) { - xPos -= num_partitions*(PART_LEN1); + xPos -= num_partitions * (PART_LEN1); } // vectorized code (four at 
once) @@ -65,22 +62,25 @@ static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) } // scalar code for the remaining items. for (; j < PART_LEN1; j++) { - yf[0][j] += MulRe(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j], - aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]); - yf[1][j] += MulIm(aec->xfBuf[0][xPos + j], aec->xfBuf[1][xPos + j], - aec->wfBuf[0][ pos + j], aec->wfBuf[1][ pos + j]); + yf[0][j] += MulRe(aec->xfBuf[0][xPos + j], + aec->xfBuf[1][xPos + j], + aec->wfBuf[0][pos + j], + aec->wfBuf[1][pos + j]); + yf[1][j] += MulIm(aec->xfBuf[0][xPos + j], + aec->xfBuf[1][xPos + j], + aec->wfBuf[0][pos + j], + aec->wfBuf[1][pos + j]); } } } -static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) -{ +static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) { const __m128 k1e_10f = _mm_set1_ps(1e-10f); - const __m128 kMu = aec->extended_filter_enabled ? - _mm_set1_ps(kExtendedMu) : _mm_set1_ps(aec->normal_mu); - const __m128 kThresh = aec->extended_filter_enabled ? - _mm_set1_ps(kExtendedErrorThreshold) : - _mm_set1_ps(aec->normal_error_threshold); + const __m128 kMu = aec->extended_filter_enabled ? _mm_set1_ps(kExtendedMu) + : _mm_set1_ps(aec->normal_mu); + const __m128 kThresh = aec->extended_filter_enabled + ? _mm_set1_ps(kExtendedErrorThreshold) + : _mm_set1_ps(aec->normal_error_threshold); int i; // vectorized code (four at once) @@ -115,12 +115,13 @@ static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) } // scalar code for the remaining items. { - const float mu = aec->extended_filter_enabled ? - kExtendedMu : aec->normal_mu; - const float error_threshold = aec->extended_filter_enabled ? - kExtendedErrorThreshold : aec->normal_error_threshold; + const float mu = + aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu; + const float error_threshold = aec->extended_filter_enabled + ? kExtendedErrorThreshold + : aec->normal_error_threshold; for (; i < (PART_LEN1); i++) { - float abs_ef; + float abs_ef; ef[0][i] /= (aec->xPow[i] + 1e-10f); ef[1][i] /= (aec->xPow[i] + 1e-10f); abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]); @@ -138,11 +139,13 @@ static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) } } -static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1]) { +static void FilterAdaptationSSE2(AecCore* aec, + float* fft, + float ef[2][PART_LEN1]) { int i, j; const int num_partitions = aec->num_partitions; for (i = 0; i < num_partitions; i++) { - int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1); + int xPos = (i + aec->xfBufBlockPos) * (PART_LEN1); int pos = i * PART_LEN1; // Check for wrap if (i + aec->xfBufBlockPos >= num_partitions) { @@ -150,7 +153,7 @@ static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1 } // Process the whole array... - for (j = 0; j < PART_LEN; j+= 4) { + for (j = 0; j < PART_LEN; j += 4) { // Load xfBuf and ef. const __m128 xfBuf_re = _mm_loadu_ps(&aec->xfBuf[0][xPos + j]); const __m128 xfBuf_im = _mm_loadu_ps(&aec->xfBuf[1][xPos + j]); @@ -169,22 +172,23 @@ static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1 const __m128 g = _mm_unpacklo_ps(e, f); const __m128 h = _mm_unpackhi_ps(e, f); // Store - _mm_storeu_ps(&fft[2*j + 0], g); - _mm_storeu_ps(&fft[2*j + 4], h); + _mm_storeu_ps(&fft[2 * j + 0], g); + _mm_storeu_ps(&fft[2 * j + 4], h); } // ... and fixup the first imaginary entry. 
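// For reference: the MulRe()/MulIm() helpers above implement one complex
// multiply, and FilterFar accumulates Y(k) += X(k) * W(k) per bin and per
// partition; the SSE2 path just does four bins at a time. A scalar sketch
// of that multiply-accumulate (names illustrative):
static void ComplexMac(float* y_re, float* y_im,
                       float x_re, float x_im,
                       float w_re, float w_im) {
  *y_re += x_re * w_re - x_im * w_im; /* Re{x * w} */
  *y_im += x_re * w_im + x_im * w_re; /* Im{x * w} */
}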
fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN], -aec->xfBuf[1][xPos + PART_LEN], - ef[0][PART_LEN], ef[1][PART_LEN]); + ef[0][PART_LEN], + ef[1][PART_LEN]); aec_rdft_inverse_128(fft); - memset(fft + PART_LEN, 0, sizeof(float)*PART_LEN); + memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN); // fft scaling { float scale = 2.0f / PART_LEN2; const __m128 scale_ps = _mm_load_ps1(&scale); - for (j = 0; j < PART_LEN; j+=4) { + for (j = 0; j < PART_LEN; j += 4) { const __m128 fft_ps = _mm_loadu_ps(&fft[j]); const __m128 fft_scale = _mm_mul_ps(fft_ps, scale_ps); _mm_storeu_ps(&fft[j], fft_scale); @@ -195,13 +199,15 @@ static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1 { float wt1 = aec->wfBuf[1][pos]; aec->wfBuf[0][pos + PART_LEN] += fft[1]; - for (j = 0; j < PART_LEN; j+= 4) { + for (j = 0; j < PART_LEN; j += 4) { __m128 wtBuf_re = _mm_loadu_ps(&aec->wfBuf[0][pos + j]); __m128 wtBuf_im = _mm_loadu_ps(&aec->wfBuf[1][pos + j]); const __m128 fft0 = _mm_loadu_ps(&fft[2 * j + 0]); const __m128 fft4 = _mm_loadu_ps(&fft[2 * j + 4]); - const __m128 fft_re = _mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(2, 0, 2 ,0)); - const __m128 fft_im = _mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(3, 1, 3 ,1)); + const __m128 fft_re = + _mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(2, 0, 2, 0)); + const __m128 fft_im = + _mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(3, 1, 3, 1)); wtBuf_re = _mm_add_ps(wtBuf_re, fft_re); wtBuf_im = _mm_add_ps(wtBuf_im, fft_im); _mm_storeu_ps(&aec->wfBuf[0][pos + j], wtBuf_re); @@ -212,8 +218,7 @@ static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1 } } -static __m128 mm_pow_ps(__m128 a, __m128 b) -{ +static __m128 mm_pow_ps(__m128 a, __m128 b) { // a^b = exp2(b * log2(a)) // exp2(x) and log2(x) are calculated using polynomial approximations. __m128 log2_a, b_log2_a, a_exp_b; @@ -238,55 +243,55 @@ static __m128 mm_pow_ps(__m128 a, __m128 b) // compensate the fact that the exponent has been shifted in the top/ // fractional part and finally getting rid of the implicit leading one // from the mantissa by substracting it out. - static const ALIGN16_BEG int float_exponent_mask[4] ALIGN16_END = - {0x7F800000, 0x7F800000, 0x7F800000, 0x7F800000}; - static const ALIGN16_BEG int eight_biased_exponent[4] ALIGN16_END = - {0x43800000, 0x43800000, 0x43800000, 0x43800000}; - static const ALIGN16_BEG int implicit_leading_one[4] ALIGN16_END = - {0x43BF8000, 0x43BF8000, 0x43BF8000, 0x43BF8000}; + static const ALIGN16_BEG int float_exponent_mask[4] ALIGN16_END = { + 0x7F800000, 0x7F800000, 0x7F800000, 0x7F800000}; + static const ALIGN16_BEG int eight_biased_exponent[4] ALIGN16_END = { + 0x43800000, 0x43800000, 0x43800000, 0x43800000}; + static const ALIGN16_BEG int implicit_leading_one[4] ALIGN16_END = { + 0x43BF8000, 0x43BF8000, 0x43BF8000, 0x43BF8000}; static const int shift_exponent_into_top_mantissa = 8; - const __m128 two_n = _mm_and_ps(a, *((__m128 *)float_exponent_mask)); - const __m128 n_1 = _mm_castsi128_ps(_mm_srli_epi32(_mm_castps_si128(two_n), - shift_exponent_into_top_mantissa)); - const __m128 n_0 = _mm_or_ps(n_1, *((__m128 *)eight_biased_exponent)); - const __m128 n = _mm_sub_ps(n_0, *((__m128 *)implicit_leading_one)); + const __m128 two_n = _mm_and_ps(a, *((__m128*)float_exponent_mask)); + const __m128 n_1 = _mm_castsi128_ps(_mm_srli_epi32( + _mm_castps_si128(two_n), shift_exponent_into_top_mantissa)); + const __m128 n_0 = _mm_or_ps(n_1, *((__m128*)eight_biased_exponent)); + const __m128 n = _mm_sub_ps(n_0, *((__m128*)implicit_leading_one)); // Compute y. 
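// For reference: the "Compute n" bit manipulation above reads the IEEE-754
// exponent field directly, which for positive normal floats equals
// floor(log2(a)), avoiding a float/int conversion in the SIMD path. A
// scalar equivalent of the same idea (illustrative):
#include <stdint.h>
#include <string.h>

static float ExponentOf(float a) { /* a positive and normal */
  uint32_t bits;
  memcpy(&bits, &a, sizeof(bits));
  /* Biased exponent field, minus the IEEE-754 bias of 127. */
  return (float)(int)((bits >> 23) & 0xFF) - 127.0f;
}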
- static const ALIGN16_BEG int mantissa_mask[4] ALIGN16_END = - {0x007FFFFF, 0x007FFFFF, 0x007FFFFF, 0x007FFFFF}; - static const ALIGN16_BEG int zero_biased_exponent_is_one[4] ALIGN16_END = - {0x3F800000, 0x3F800000, 0x3F800000, 0x3F800000}; - const __m128 mantissa = _mm_and_ps(a, *((__m128 *)mantissa_mask)); - const __m128 y = _mm_or_ps( - mantissa, *((__m128 *)zero_biased_exponent_is_one)); + static const ALIGN16_BEG int mantissa_mask[4] ALIGN16_END = { + 0x007FFFFF, 0x007FFFFF, 0x007FFFFF, 0x007FFFFF}; + static const ALIGN16_BEG int zero_biased_exponent_is_one[4] ALIGN16_END = { + 0x3F800000, 0x3F800000, 0x3F800000, 0x3F800000}; + const __m128 mantissa = _mm_and_ps(a, *((__m128*)mantissa_mask)); + const __m128 y = + _mm_or_ps(mantissa, *((__m128*)zero_biased_exponent_is_one)); // Approximate log2(y) ~= (y - 1) * pol5(y). // pol5(y) = C5 * y^5 + C4 * y^4 + C3 * y^3 + C2 * y^2 + C1 * y + C0 - static const ALIGN16_BEG float ALIGN16_END C5[4] = - {-3.4436006e-2f, -3.4436006e-2f, -3.4436006e-2f, -3.4436006e-2f}; - static const ALIGN16_BEG float ALIGN16_END C4[4] = - {3.1821337e-1f, 3.1821337e-1f, 3.1821337e-1f, 3.1821337e-1f}; - static const ALIGN16_BEG float ALIGN16_END C3[4] = - {-1.2315303f, -1.2315303f, -1.2315303f, -1.2315303f}; - static const ALIGN16_BEG float ALIGN16_END C2[4] = - {2.5988452f, 2.5988452f, 2.5988452f, 2.5988452f}; - static const ALIGN16_BEG float ALIGN16_END C1[4] = - {-3.3241990f, -3.3241990f, -3.3241990f, -3.3241990f}; - static const ALIGN16_BEG float ALIGN16_END C0[4] = - {3.1157899f, 3.1157899f, 3.1157899f, 3.1157899f}; - const __m128 pol5_y_0 = _mm_mul_ps(y, *((__m128 *)C5)); - const __m128 pol5_y_1 = _mm_add_ps(pol5_y_0, *((__m128 *)C4)); + static const ALIGN16_BEG float ALIGN16_END C5[4] = { + -3.4436006e-2f, -3.4436006e-2f, -3.4436006e-2f, -3.4436006e-2f}; + static const ALIGN16_BEG float ALIGN16_END + C4[4] = {3.1821337e-1f, 3.1821337e-1f, 3.1821337e-1f, 3.1821337e-1f}; + static const ALIGN16_BEG float ALIGN16_END + C3[4] = {-1.2315303f, -1.2315303f, -1.2315303f, -1.2315303f}; + static const ALIGN16_BEG float ALIGN16_END + C2[4] = {2.5988452f, 2.5988452f, 2.5988452f, 2.5988452f}; + static const ALIGN16_BEG float ALIGN16_END + C1[4] = {-3.3241990f, -3.3241990f, -3.3241990f, -3.3241990f}; + static const ALIGN16_BEG float ALIGN16_END + C0[4] = {3.1157899f, 3.1157899f, 3.1157899f, 3.1157899f}; + const __m128 pol5_y_0 = _mm_mul_ps(y, *((__m128*)C5)); + const __m128 pol5_y_1 = _mm_add_ps(pol5_y_0, *((__m128*)C4)); const __m128 pol5_y_2 = _mm_mul_ps(pol5_y_1, y); - const __m128 pol5_y_3 = _mm_add_ps(pol5_y_2, *((__m128 *)C3)); + const __m128 pol5_y_3 = _mm_add_ps(pol5_y_2, *((__m128*)C3)); const __m128 pol5_y_4 = _mm_mul_ps(pol5_y_3, y); - const __m128 pol5_y_5 = _mm_add_ps(pol5_y_4, *((__m128 *)C2)); + const __m128 pol5_y_5 = _mm_add_ps(pol5_y_4, *((__m128*)C2)); const __m128 pol5_y_6 = _mm_mul_ps(pol5_y_5, y); - const __m128 pol5_y_7 = _mm_add_ps(pol5_y_6, *((__m128 *)C1)); + const __m128 pol5_y_7 = _mm_add_ps(pol5_y_6, *((__m128*)C1)); const __m128 pol5_y_8 = _mm_mul_ps(pol5_y_7, y); - const __m128 pol5_y = _mm_add_ps(pol5_y_8, *((__m128 *)C0)); - const __m128 y_minus_one = _mm_sub_ps( - y, *((__m128 *)zero_biased_exponent_is_one)); - const __m128 log2_y = _mm_mul_ps(y_minus_one , pol5_y); + const __m128 pol5_y = _mm_add_ps(pol5_y_8, *((__m128*)C0)); + const __m128 y_minus_one = + _mm_sub_ps(y, *((__m128*)zero_biased_exponent_is_one)); + const __m128 log2_y = _mm_mul_ps(y_minus_one, pol5_y); // Combine parts. 
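// For reference: the coefficients above evaluate
// log2(y) ~= (y - 1) * pol5(y) for the mantissa y in [1, 2), with pol5 in
// Horner form. A scalar version using the same constants:
static float Log2Mantissa(float y) { /* y in [1, 2) */
  const float pol5 =
      ((((-3.4436006e-2f * y + 3.1821337e-1f) * y - 1.2315303f) * y +
        2.5988452f) * y - 3.3241990f) * y + 3.1157899f;
  return (y - 1.0f) * pol5;
}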
log2_a = _mm_add_ps(n, log2_y); @@ -310,38 +315,38 @@ static __m128 mm_pow_ps(__m128 a, __m128 b) // maximum relative error of 0.17%. // To avoid over/underflow, we reduce the range of input to ]-127, 129]. - static const ALIGN16_BEG float max_input[4] ALIGN16_END = - {129.f, 129.f, 129.f, 129.f}; - static const ALIGN16_BEG float min_input[4] ALIGN16_END = - {-126.99999f, -126.99999f, -126.99999f, -126.99999f}; - const __m128 x_min = _mm_min_ps(b_log2_a, *((__m128 *)max_input)); - const __m128 x_max = _mm_max_ps(x_min, *((__m128 *)min_input)); + static const ALIGN16_BEG float max_input[4] ALIGN16_END = {129.f, 129.f, + 129.f, 129.f}; + static const ALIGN16_BEG float min_input[4] ALIGN16_END = { + -126.99999f, -126.99999f, -126.99999f, -126.99999f}; + const __m128 x_min = _mm_min_ps(b_log2_a, *((__m128*)max_input)); + const __m128 x_max = _mm_max_ps(x_min, *((__m128*)min_input)); // Compute n. - static const ALIGN16_BEG float half[4] ALIGN16_END = - {0.5f, 0.5f, 0.5f, 0.5f}; - const __m128 x_minus_half = _mm_sub_ps(x_max, *((__m128 *)half)); + static const ALIGN16_BEG float half[4] ALIGN16_END = {0.5f, 0.5f, + 0.5f, 0.5f}; + const __m128 x_minus_half = _mm_sub_ps(x_max, *((__m128*)half)); const __m128i x_minus_half_floor = _mm_cvtps_epi32(x_minus_half); // Compute 2^n. - static const ALIGN16_BEG int float_exponent_bias[4] ALIGN16_END = - {127, 127, 127, 127}; + static const ALIGN16_BEG int float_exponent_bias[4] ALIGN16_END = { + 127, 127, 127, 127}; static const int float_exponent_shift = 23; - const __m128i two_n_exponent = _mm_add_epi32( - x_minus_half_floor, *((__m128i *)float_exponent_bias)); - const __m128 two_n = _mm_castsi128_ps(_mm_slli_epi32( - two_n_exponent, float_exponent_shift)); + const __m128i two_n_exponent = + _mm_add_epi32(x_minus_half_floor, *((__m128i*)float_exponent_bias)); + const __m128 two_n = + _mm_castsi128_ps(_mm_slli_epi32(two_n_exponent, float_exponent_shift)); // Compute y. const __m128 y = _mm_sub_ps(x_max, _mm_cvtepi32_ps(x_minus_half_floor)); // Approximate 2^y ~= C2 * y^2 + C1 * y + C0. - static const ALIGN16_BEG float C2[4] ALIGN16_END = - {3.3718944e-1f, 3.3718944e-1f, 3.3718944e-1f, 3.3718944e-1f}; - static const ALIGN16_BEG float C1[4] ALIGN16_END = - {6.5763628e-1f, 6.5763628e-1f, 6.5763628e-1f, 6.5763628e-1f}; - static const ALIGN16_BEG float C0[4] ALIGN16_END = - {1.0017247f, 1.0017247f, 1.0017247f, 1.0017247f}; - const __m128 exp2_y_0 = _mm_mul_ps(y, *((__m128 *)C2)); - const __m128 exp2_y_1 = _mm_add_ps(exp2_y_0, *((__m128 *)C1)); + static const ALIGN16_BEG float C2[4] ALIGN16_END = { + 3.3718944e-1f, 3.3718944e-1f, 3.3718944e-1f, 3.3718944e-1f}; + static const ALIGN16_BEG float C1[4] ALIGN16_END = { + 6.5763628e-1f, 6.5763628e-1f, 6.5763628e-1f, 6.5763628e-1f}; + static const ALIGN16_BEG float C0[4] ALIGN16_END = {1.0017247f, 1.0017247f, + 1.0017247f, 1.0017247f}; + const __m128 exp2_y_0 = _mm_mul_ps(y, *((__m128*)C2)); + const __m128 exp2_y_1 = _mm_add_ps(exp2_y_0, *((__m128*)C1)); const __m128 exp2_y_2 = _mm_mul_ps(exp2_y_1, y); - const __m128 exp2_y = _mm_add_ps(exp2_y_2, *((__m128 *)C0)); + const __m128 exp2_y = _mm_add_ps(exp2_y_2, *((__m128*)C0)); // Combine parts. 
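// For reference: the exp2 path above splits x into an integer part n,
// applied exactly through the float exponent field, and a fractional part
// y in [0, 1] approximated by the quadratic above. A scalar sketch using
// ldexpf() for the 2^n step (the SIMD code builds 2^n from exponent bits):
#include <math.h>

static float Exp2Approx(float x) { /* x within ]-127, 129], as above */
  const float n = floorf(x);
  const float y = x - n; /* y in [0, 1) */
  const float exp2_y =
      (3.3718944e-1f * y + 6.5763628e-1f) * y + 1.0017247f;
  return ldexpf(exp2_y, (int)n);
}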
a_exp_b = _mm_mul_ps(exp2_y, two_n); @@ -352,7 +357,8 @@ static __m128 mm_pow_ps(__m128 a, __m128 b) extern const float WebRtcAec_weightCurve[65]; extern const float WebRtcAec_overDriveCurve[65]; -static void OverdriveAndSuppressSSE2(AecCore* aec, float hNl[PART_LEN1], +static void OverdriveAndSuppressSSE2(AecCore* aec, + float hNl[PART_LEN1], const float hNlFb, float efw[2][PART_LEN1]) { int i; @@ -361,26 +367,25 @@ static void OverdriveAndSuppressSSE2(AecCore* aec, float hNl[PART_LEN1], const __m128 vec_minus_one = _mm_set1_ps(-1.0f); const __m128 vec_overDriveSm = _mm_set1_ps(aec->overDriveSm); // vectorized code (four at once) - for (i = 0; i + 3 < PART_LEN1; i+=4) { + for (i = 0; i + 3 < PART_LEN1; i += 4) { // Weight subbands __m128 vec_hNl = _mm_loadu_ps(&hNl[i]); const __m128 vec_weightCurve = _mm_loadu_ps(&WebRtcAec_weightCurve[i]); const __m128 bigger = _mm_cmpgt_ps(vec_hNl, vec_hNlFb); - const __m128 vec_weightCurve_hNlFb = _mm_mul_ps( - vec_weightCurve, vec_hNlFb); + const __m128 vec_weightCurve_hNlFb = _mm_mul_ps(vec_weightCurve, vec_hNlFb); const __m128 vec_one_weightCurve = _mm_sub_ps(vec_one, vec_weightCurve); - const __m128 vec_one_weightCurve_hNl = _mm_mul_ps( - vec_one_weightCurve, vec_hNl); + const __m128 vec_one_weightCurve_hNl = + _mm_mul_ps(vec_one_weightCurve, vec_hNl); const __m128 vec_if0 = _mm_andnot_ps(bigger, vec_hNl); const __m128 vec_if1 = _mm_and_ps( bigger, _mm_add_ps(vec_weightCurve_hNlFb, vec_one_weightCurve_hNl)); vec_hNl = _mm_or_ps(vec_if0, vec_if1); { - const __m128 vec_overDriveCurve = _mm_loadu_ps( - &WebRtcAec_overDriveCurve[i]); - const __m128 vec_overDriveSm_overDriveCurve = _mm_mul_ps( - vec_overDriveSm, vec_overDriveCurve); + const __m128 vec_overDriveCurve = + _mm_loadu_ps(&WebRtcAec_overDriveCurve[i]); + const __m128 vec_overDriveSm_overDriveCurve = + _mm_mul_ps(vec_overDriveSm, vec_overDriveCurve); vec_hNl = mm_pow_ps(vec_hNl, vec_overDriveSm_overDriveCurve); _mm_storeu_ps(&hNl[i], vec_hNl); } @@ -404,7 +409,7 @@ static void OverdriveAndSuppressSSE2(AecCore* aec, float hNl[PART_LEN1], // Weight subbands if (hNl[i] > hNlFb) { hNl[i] = WebRtcAec_weightCurve[i] * hNlFb + - (1 - WebRtcAec_weightCurve[i]) * hNl[i]; + (1 - WebRtcAec_weightCurve[i]) * hNl[i]; } hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]); diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.c index e63f36719c22..a19e8877bbc7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.c @@ -42,7 +42,7 @@ ALIGN16_BEG float ALIGN16_END cftmdl_wk1r[4]; static int ip[16]; -static void bitrv2_32(int *ip, float *a) { +static void bitrv2_32(int* ip, float* a) { const int n = 32; int j, j1, k, k1, m, m2; float xr, xi, yr, yi; @@ -116,7 +116,7 @@ static void bitrv2_32(int *ip, float *a) { } } -static void bitrv2_128(float *a) { +static void bitrv2_128(float* a) { /* Following things have been attempted but are no faster: (a) Storing the swap indexes in a LUT (index calculations are done @@ -146,7 +146,7 @@ static void bitrv2_128(float *a) { a[j1 + 1] = yi; a[k1 + 0] = xr; a[k1 + 1] = xi; - j1 += 8; + j1 += 8; k1 += 16; xr = a[j1 + 0]; xi = a[j1 + 1]; @@ -166,7 +166,7 @@ static void bitrv2_128(float *a) { a[j1 + 1] = yi; a[k1 + 0] = xr; a[k1 + 1] = xi; - j1 += 8; + j1 += 8; k1 += 16; xr = a[j1 + 0]; xi = a[j1 + 1]; @@ -265,7 +265,7 @@ static void makewt_32(void) { } static void 
makect_32(void) { - float *c = rdft_w + 32; + float* c = rdft_w + 32; const int nc = 32; int j, nch; float delta; @@ -281,7 +281,7 @@ static void makect_32(void) { } } -static void cft1st_128_C(float *a) { +static void cft1st_128_C(float* a) { const int n = 128; int j, k1, k2; float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i; @@ -385,7 +385,7 @@ static void cft1st_128_C(float *a) { } } -static void cftmdl_128_C(float *a) { +static void cftmdl_128_C(float* a) { const int l = 8; const int n = 128; const int m = 32; @@ -394,7 +394,7 @@ static void cftmdl_128_C(float *a) { float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; for (j0 = 0; j0 < l; j0 += 2) { - j1 = j0 + 8; + j1 = j0 + 8; j2 = j0 + 16; j3 = j0 + 24; x0r = a[j0 + 0] + a[j1 + 0]; @@ -416,7 +416,7 @@ static void cftmdl_128_C(float *a) { } wk1r = rdft_w[2]; for (j0 = m; j0 < l + m; j0 += 2) { - j1 = j0 + 8; + j1 = j0 + 8; j2 = j0 + 16; j3 = j0 + 24; x0r = a[j0 + 0] + a[j1 + 0]; @@ -452,7 +452,7 @@ static void cftmdl_128_C(float *a) { wk3r = rdft_wk3ri_first[k1 + 0]; wk3i = rdft_wk3ri_first[k1 + 1]; for (j0 = k; j0 < l + k; j0 += 2) { - j1 = j0 + 8; + j1 = j0 + 8; j2 = j0 + 16; j3 = j0 + 24; x0r = a[j0 + 0] + a[j1 + 0]; @@ -483,7 +483,7 @@ static void cftmdl_128_C(float *a) { wk3r = rdft_wk3ri_second[k1 + 0]; wk3i = rdft_wk3ri_second[k1 + 1]; for (j0 = k + m; j0 < l + (k + m); j0 += 2) { - j1 = j0 + 8; + j1 = j0 + 8; j2 = j0 + 16; j3 = j0 + 24; x0r = a[j0 + 0] + a[j1 + 0]; @@ -512,7 +512,7 @@ static void cftmdl_128_C(float *a) { } } -static void cftfsub_128(float *a) { +static void cftfsub_128(float* a) { int j, j1, j2, j3, l; float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; @@ -542,7 +542,7 @@ static void cftfsub_128(float *a) { } } -static void cftbsub_128(float *a) { +static void cftbsub_128(float* a) { int j, j1, j2, j3, l; float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; @@ -573,14 +573,14 @@ static void cftbsub_128(float *a) { } } -static void rftfsub_128_C(float *a) { - const float *c = rdft_w + 32; +static void rftfsub_128_C(float* a) { + const float* c = rdft_w + 32; int j1, j2, k1, k2; float wkr, wki, xr, xi, yr, yi; for (j1 = 1, j2 = 2; j2 < 64; j1 += 1, j2 += 2) { k2 = 128 - j2; - k1 = 32 - j1; + k1 = 32 - j1; wkr = 0.5f - c[k1]; wki = c[j1]; xr = a[j2 + 0] - a[k2 + 0]; @@ -594,15 +594,15 @@ static void rftfsub_128_C(float *a) { } } -static void rftbsub_128_C(float *a) { - const float *c = rdft_w + 32; +static void rftbsub_128_C(float* a) { + const float* c = rdft_w + 32; int j1, j2, k1, k2; float wkr, wki, xr, xi, yr, yi; a[1] = -a[1]; for (j1 = 1, j2 = 2; j2 < 64; j1 += 1, j2 += 2) { k2 = 128 - j2; - k1 = 32 - j1; + k1 = 32 - j1; wkr = 0.5f - c[k1]; wki = c[j1]; xr = a[j2 + 0] - a[k2 + 0]; @@ -617,7 +617,7 @@ static void rftbsub_128_C(float *a) { a[65] = -a[65]; } -void aec_rdft_forward_128(float *a) { +void aec_rdft_forward_128(float* a) { float xi; bitrv2_128(a); cftfsub_128(a); @@ -627,7 +627,7 @@ void aec_rdft_forward_128(float *a) { a[1] = xi; } -void aec_rdft_inverse_128(float *a) { +void aec_rdft_inverse_128(float* a) { a[1] = 0.5f * (a[0] - a[1]); a[0] -= a[1]; rftbsub_128(a); diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.h index 91bedc9fc729..8a2e0b50716a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft.h @@ -20,11 +20,11 @@ static __inline __m128i _mm_castps_si128(__m128 a) { return *(__m128i*)&a; } #endif #ifdef _MSC_VER /* visual c++ 
*/ -# define ALIGN16_BEG __declspec(align(16)) -# define ALIGN16_END +#define ALIGN16_BEG __declspec(align(16)) +#define ALIGN16_END #else /* gcc or icc */ -# define ALIGN16_BEG -# define ALIGN16_END __attribute__((aligned(16))) +#define ALIGN16_BEG +#define ALIGN16_END __attribute__((aligned(16))) #endif // constants shared by all paths (C, SSE2). @@ -33,16 +33,16 @@ extern float rdft_w[64]; extern float rdft_wk3ri_first[32]; extern float rdft_wk3ri_second[32]; // constants used by SSE2 but initialized in C path. -extern float rdft_wk1r[32]; -extern float rdft_wk2r[32]; -extern float rdft_wk3r[32]; -extern float rdft_wk1i[32]; -extern float rdft_wk2i[32]; -extern float rdft_wk3i[32]; -extern float cftmdl_wk1r[4]; +extern ALIGN16_BEG float ALIGN16_END rdft_wk1r[32]; +extern ALIGN16_BEG float ALIGN16_END rdft_wk2r[32]; +extern ALIGN16_BEG float ALIGN16_END rdft_wk3r[32]; +extern ALIGN16_BEG float ALIGN16_END rdft_wk1i[32]; +extern ALIGN16_BEG float ALIGN16_END rdft_wk2i[32]; +extern ALIGN16_BEG float ALIGN16_END rdft_wk3i[32]; +extern ALIGN16_BEG float ALIGN16_END cftmdl_wk1r[4]; // code path selection function pointers -typedef void (*rft_sub_128_t)(float *a); +typedef void (*rft_sub_128_t)(float* a); extern rft_sub_128_t rftfsub_128; extern rft_sub_128_t rftbsub_128; extern rft_sub_128_t cft1st_128; @@ -51,7 +51,7 @@ extern rft_sub_128_t cftmdl_128; // entry points void aec_rdft_init(void); void aec_rdft_init_sse2(void); -void aec_rdft_forward_128(float *a); -void aec_rdft_inverse_128(float *a); +void aec_rdft_forward_128(float* a); +void aec_rdft_inverse_128(float* a); #endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_RDFT_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c index 49a4072639c3..b4e453ff53ea 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c @@ -12,165 +12,164 @@ #include -static const ALIGN16_BEG float ALIGN16_END k_swap_sign[4] = - {-1.f, 1.f, -1.f, 1.f}; +static const ALIGN16_BEG float ALIGN16_END + k_swap_sign[4] = {-1.f, 1.f, -1.f, 1.f}; -static void cft1st_128_SSE2(float *a) { +static void cft1st_128_SSE2(float* a) { const __m128 mm_swap_sign = _mm_load_ps(k_swap_sign); int j, k2; for (k2 = 0, j = 0; j < 128; j += 16, k2 += 4) { - __m128 a00v = _mm_loadu_ps(&a[j + 0]); - __m128 a04v = _mm_loadu_ps(&a[j + 4]); - __m128 a08v = _mm_loadu_ps(&a[j + 8]); - __m128 a12v = _mm_loadu_ps(&a[j + 12]); - __m128 a01v = _mm_shuffle_ps(a00v, a08v, _MM_SHUFFLE(1, 0, 1 ,0)); - __m128 a23v = _mm_shuffle_ps(a00v, a08v, _MM_SHUFFLE(3, 2, 3 ,2)); - __m128 a45v = _mm_shuffle_ps(a04v, a12v, _MM_SHUFFLE(1, 0, 1 ,0)); - __m128 a67v = _mm_shuffle_ps(a04v, a12v, _MM_SHUFFLE(3, 2, 3 ,2)); + __m128 a00v = _mm_loadu_ps(&a[j + 0]); + __m128 a04v = _mm_loadu_ps(&a[j + 4]); + __m128 a08v = _mm_loadu_ps(&a[j + 8]); + __m128 a12v = _mm_loadu_ps(&a[j + 12]); + __m128 a01v = _mm_shuffle_ps(a00v, a08v, _MM_SHUFFLE(1, 0, 1, 0)); + __m128 a23v = _mm_shuffle_ps(a00v, a08v, _MM_SHUFFLE(3, 2, 3, 2)); + __m128 a45v = _mm_shuffle_ps(a04v, a12v, _MM_SHUFFLE(1, 0, 1, 0)); + __m128 a67v = _mm_shuffle_ps(a04v, a12v, _MM_SHUFFLE(3, 2, 3, 2)); - const __m128 wk1rv = _mm_load_ps(&rdft_wk1r[k2]); - const __m128 wk1iv = _mm_load_ps(&rdft_wk1i[k2]); - const __m128 wk2rv = _mm_load_ps(&rdft_wk2r[k2]); - const __m128 wk2iv = _mm_load_ps(&rdft_wk2i[k2]); - const __m128 wk3rv = 
_mm_load_ps(&rdft_wk3r[k2]); - const __m128 wk3iv = _mm_load_ps(&rdft_wk3i[k2]); - __m128 x0v = _mm_add_ps(a01v, a23v); - const __m128 x1v = _mm_sub_ps(a01v, a23v); - const __m128 x2v = _mm_add_ps(a45v, a67v); - const __m128 x3v = _mm_sub_ps(a45v, a67v); - __m128 x0w; - a01v = _mm_add_ps(x0v, x2v); - x0v = _mm_sub_ps(x0v, x2v); - x0w = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0 ,1)); + const __m128 wk1rv = _mm_load_ps(&rdft_wk1r[k2]); + const __m128 wk1iv = _mm_load_ps(&rdft_wk1i[k2]); + const __m128 wk2rv = _mm_load_ps(&rdft_wk2r[k2]); + const __m128 wk2iv = _mm_load_ps(&rdft_wk2i[k2]); + const __m128 wk3rv = _mm_load_ps(&rdft_wk3r[k2]); + const __m128 wk3iv = _mm_load_ps(&rdft_wk3i[k2]); + __m128 x0v = _mm_add_ps(a01v, a23v); + const __m128 x1v = _mm_sub_ps(a01v, a23v); + const __m128 x2v = _mm_add_ps(a45v, a67v); + const __m128 x3v = _mm_sub_ps(a45v, a67v); + __m128 x0w; + a01v = _mm_add_ps(x0v, x2v); + x0v = _mm_sub_ps(x0v, x2v); + x0w = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0, 1)); { const __m128 a45_0v = _mm_mul_ps(wk2rv, x0v); const __m128 a45_1v = _mm_mul_ps(wk2iv, x0w); - a45v = _mm_add_ps(a45_0v, a45_1v); + a45v = _mm_add_ps(a45_0v, a45_1v); } { - __m128 a23_0v, a23_1v; - const __m128 x3w = _mm_shuffle_ps(x3v, x3v, _MM_SHUFFLE(2, 3, 0 ,1)); - const __m128 x3s = _mm_mul_ps(mm_swap_sign, x3w); - x0v = _mm_add_ps(x1v, x3s); - x0w = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0 ,1)); - a23_0v = _mm_mul_ps(wk1rv, x0v); - a23_1v = _mm_mul_ps(wk1iv, x0w); - a23v = _mm_add_ps(a23_0v, a23_1v); + __m128 a23_0v, a23_1v; + const __m128 x3w = _mm_shuffle_ps(x3v, x3v, _MM_SHUFFLE(2, 3, 0, 1)); + const __m128 x3s = _mm_mul_ps(mm_swap_sign, x3w); + x0v = _mm_add_ps(x1v, x3s); + x0w = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0, 1)); + a23_0v = _mm_mul_ps(wk1rv, x0v); + a23_1v = _mm_mul_ps(wk1iv, x0w); + a23v = _mm_add_ps(a23_0v, a23_1v); - x0v = _mm_sub_ps(x1v, x3s); - x0w = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0 ,1)); + x0v = _mm_sub_ps(x1v, x3s); + x0w = _mm_shuffle_ps(x0v, x0v, _MM_SHUFFLE(2, 3, 0, 1)); } { const __m128 a67_0v = _mm_mul_ps(wk3rv, x0v); const __m128 a67_1v = _mm_mul_ps(wk3iv, x0w); - a67v = _mm_add_ps(a67_0v, a67_1v); + a67v = _mm_add_ps(a67_0v, a67_1v); } - a00v = _mm_shuffle_ps(a01v, a23v, _MM_SHUFFLE(1, 0, 1 ,0)); - a04v = _mm_shuffle_ps(a45v, a67v, _MM_SHUFFLE(1, 0, 1 ,0)); - a08v = _mm_shuffle_ps(a01v, a23v, _MM_SHUFFLE(3, 2, 3 ,2)); - a12v = _mm_shuffle_ps(a45v, a67v, _MM_SHUFFLE(3, 2, 3 ,2)); - _mm_storeu_ps(&a[j + 0], a00v); - _mm_storeu_ps(&a[j + 4], a04v); - _mm_storeu_ps(&a[j + 8], a08v); + a00v = _mm_shuffle_ps(a01v, a23v, _MM_SHUFFLE(1, 0, 1, 0)); + a04v = _mm_shuffle_ps(a45v, a67v, _MM_SHUFFLE(1, 0, 1, 0)); + a08v = _mm_shuffle_ps(a01v, a23v, _MM_SHUFFLE(3, 2, 3, 2)); + a12v = _mm_shuffle_ps(a45v, a67v, _MM_SHUFFLE(3, 2, 3, 2)); + _mm_storeu_ps(&a[j + 0], a00v); + _mm_storeu_ps(&a[j + 4], a04v); + _mm_storeu_ps(&a[j + 8], a08v); _mm_storeu_ps(&a[j + 12], a12v); } } -static void cftmdl_128_SSE2(float *a) { +static void cftmdl_128_SSE2(float* a) { const int l = 8; const __m128 mm_swap_sign = _mm_load_ps(k_swap_sign); int j0; __m128 wk1rv = _mm_load_ps(cftmdl_wk1r); for (j0 = 0; j0 < l; j0 += 2) { - const __m128i a_00 = _mm_loadl_epi64((__m128i*)&a[j0 + 0]); - const __m128i a_08 = _mm_loadl_epi64((__m128i*)&a[j0 + 8]); + const __m128i a_00 = _mm_loadl_epi64((__m128i*)&a[j0 + 0]); + const __m128i a_08 = _mm_loadl_epi64((__m128i*)&a[j0 + 8]); const __m128i a_32 = _mm_loadl_epi64((__m128i*)&a[j0 + 32]); const __m128i a_40 = _mm_loadl_epi64((__m128i*)&a[j0 
+ 40]); - const __m128 a_00_32 = _mm_shuffle_ps(_mm_castsi128_ps(a_00), - _mm_castsi128_ps(a_32), - _MM_SHUFFLE(1, 0, 1 ,0)); - const __m128 a_08_40 = _mm_shuffle_ps(_mm_castsi128_ps(a_08), - _mm_castsi128_ps(a_40), - _MM_SHUFFLE(1, 0, 1 ,0)); - __m128 x0r0_0i0_0r1_x0i1 = _mm_add_ps(a_00_32, a_08_40); - const __m128 x1r0_1i0_1r1_x1i1 = _mm_sub_ps(a_00_32, a_08_40); + const __m128 a_00_32 = _mm_shuffle_ps(_mm_castsi128_ps(a_00), + _mm_castsi128_ps(a_32), + _MM_SHUFFLE(1, 0, 1, 0)); + const __m128 a_08_40 = _mm_shuffle_ps(_mm_castsi128_ps(a_08), + _mm_castsi128_ps(a_40), + _MM_SHUFFLE(1, 0, 1, 0)); + __m128 x0r0_0i0_0r1_x0i1 = _mm_add_ps(a_00_32, a_08_40); + const __m128 x1r0_1i0_1r1_x1i1 = _mm_sub_ps(a_00_32, a_08_40); const __m128i a_16 = _mm_loadl_epi64((__m128i*)&a[j0 + 16]); const __m128i a_24 = _mm_loadl_epi64((__m128i*)&a[j0 + 24]); const __m128i a_48 = _mm_loadl_epi64((__m128i*)&a[j0 + 48]); const __m128i a_56 = _mm_loadl_epi64((__m128i*)&a[j0 + 56]); - const __m128 a_16_48 = _mm_shuffle_ps(_mm_castsi128_ps(a_16), - _mm_castsi128_ps(a_48), - _MM_SHUFFLE(1, 0, 1 ,0)); - const __m128 a_24_56 = _mm_shuffle_ps(_mm_castsi128_ps(a_24), - _mm_castsi128_ps(a_56), - _MM_SHUFFLE(1, 0, 1 ,0)); - const __m128 x2r0_2i0_2r1_x2i1 = _mm_add_ps(a_16_48, a_24_56); - const __m128 x3r0_3i0_3r1_x3i1 = _mm_sub_ps(a_16_48, a_24_56); + const __m128 a_16_48 = _mm_shuffle_ps(_mm_castsi128_ps(a_16), + _mm_castsi128_ps(a_48), + _MM_SHUFFLE(1, 0, 1, 0)); + const __m128 a_24_56 = _mm_shuffle_ps(_mm_castsi128_ps(a_24), + _mm_castsi128_ps(a_56), + _MM_SHUFFLE(1, 0, 1, 0)); + const __m128 x2r0_2i0_2r1_x2i1 = _mm_add_ps(a_16_48, a_24_56); + const __m128 x3r0_3i0_3r1_x3i1 = _mm_sub_ps(a_16_48, a_24_56); - const __m128 xx0 = _mm_add_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); - const __m128 xx1 = _mm_sub_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); + const __m128 xx0 = _mm_add_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); + const __m128 xx1 = _mm_sub_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); - const __m128 x3i0_3r0_3i1_x3r1 = _mm_castsi128_ps( - _mm_shuffle_epi32(_mm_castps_si128(x3r0_3i0_3r1_x3i1), - _MM_SHUFFLE(2, 3, 0, 1))); - const __m128 x3_swapped = _mm_mul_ps(mm_swap_sign, x3i0_3r0_3i1_x3r1); - const __m128 x1_x3_add = _mm_add_ps(x1r0_1i0_1r1_x1i1, x3_swapped); - const __m128 x1_x3_sub = _mm_sub_ps(x1r0_1i0_1r1_x1i1, x3_swapped); + const __m128 x3i0_3r0_3i1_x3r1 = _mm_castsi128_ps(_mm_shuffle_epi32( + _mm_castps_si128(x3r0_3i0_3r1_x3i1), _MM_SHUFFLE(2, 3, 0, 1))); + const __m128 x3_swapped = _mm_mul_ps(mm_swap_sign, x3i0_3r0_3i1_x3r1); + const __m128 x1_x3_add = _mm_add_ps(x1r0_1i0_1r1_x1i1, x3_swapped); + const __m128 x1_x3_sub = _mm_sub_ps(x1r0_1i0_1r1_x1i1, x3_swapped); - const __m128 yy0 = _mm_shuffle_ps(x1_x3_add, x1_x3_sub, - _MM_SHUFFLE(2, 2, 2 ,2)); - const __m128 yy1 = _mm_shuffle_ps(x1_x3_add, x1_x3_sub, - _MM_SHUFFLE(3, 3, 3 ,3)); + const __m128 yy0 = + _mm_shuffle_ps(x1_x3_add, x1_x3_sub, _MM_SHUFFLE(2, 2, 2, 2)); + const __m128 yy1 = + _mm_shuffle_ps(x1_x3_add, x1_x3_sub, _MM_SHUFFLE(3, 3, 3, 3)); const __m128 yy2 = _mm_mul_ps(mm_swap_sign, yy1); const __m128 yy3 = _mm_add_ps(yy0, yy2); const __m128 yy4 = _mm_mul_ps(wk1rv, yy3); - _mm_storel_epi64((__m128i*)&a[j0 + 0], _mm_castps_si128(xx0)); - _mm_storel_epi64((__m128i*)&a[j0 + 32], - _mm_shuffle_epi32(_mm_castps_si128(xx0), - _MM_SHUFFLE(3, 2, 3, 2))); + _mm_storel_epi64((__m128i*)&a[j0 + 0], _mm_castps_si128(xx0)); + _mm_storel_epi64( + (__m128i*)&a[j0 + 32], + _mm_shuffle_epi32(_mm_castps_si128(xx0), _MM_SHUFFLE(3, 2, 3, 2))); 
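// For reference: each block above is a radix-4 butterfly applied to two
// complex points at once; the multiply by +/-j shows up as the
// swap-and-negate with |k_swap_sign|. A scalar sketch of one butterfly
// under Ooura's forward sign convention (twiddle multiplication omitted;
// names illustrative):
typedef struct { float re, im; } cplx;

static void Radix4Butterfly(cplx* a0, cplx* a1, cplx* a2, cplx* a3) {
  const cplx x0 = { a0->re + a1->re, a0->im + a1->im };
  const cplx x1 = { a0->re - a1->re, a0->im - a1->im };
  const cplx x2 = { a2->re + a3->re, a2->im + a3->im };
  const cplx x3 = { a2->re - a3->re, a2->im - a3->im };
  a0->re = x0.re + x2.re;  a0->im = x0.im + x2.im;
  a2->re = x0.re - x2.re;  a2->im = x0.im - x2.im;
  a1->re = x1.re - x3.im;  a1->im = x1.im + x3.re; /* x1 + j*x3 */
  a3->re = x1.re + x3.im;  a3->im = x1.im - x3.re; /* x1 - j*x3 */
}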
_mm_storel_epi64((__m128i*)&a[j0 + 16], _mm_castps_si128(xx1)); - _mm_storel_epi64((__m128i*)&a[j0 + 48], - _mm_shuffle_epi32(_mm_castps_si128(xx1), - _MM_SHUFFLE(2, 3, 2, 3))); + _mm_storel_epi64( + (__m128i*)&a[j0 + 48], + _mm_shuffle_epi32(_mm_castps_si128(xx1), _MM_SHUFFLE(2, 3, 2, 3))); a[j0 + 48] = -a[j0 + 48]; - _mm_storel_epi64((__m128i*)&a[j0 + 8], _mm_castps_si128(x1_x3_add)); + _mm_storel_epi64((__m128i*)&a[j0 + 8], _mm_castps_si128(x1_x3_add)); _mm_storel_epi64((__m128i*)&a[j0 + 24], _mm_castps_si128(x1_x3_sub)); _mm_storel_epi64((__m128i*)&a[j0 + 40], _mm_castps_si128(yy4)); - _mm_storel_epi64((__m128i*)&a[j0 + 56], - _mm_shuffle_epi32(_mm_castps_si128(yy4), - _MM_SHUFFLE(2, 3, 2, 3))); + _mm_storel_epi64( + (__m128i*)&a[j0 + 56], + _mm_shuffle_epi32(_mm_castps_si128(yy4), _MM_SHUFFLE(2, 3, 2, 3))); } { int k = 64; int k1 = 2; int k2 = 2 * k1; - const __m128 wk2rv = _mm_load_ps(&rdft_wk2r[k2+0]); - const __m128 wk2iv = _mm_load_ps(&rdft_wk2i[k2+0]); - const __m128 wk1iv = _mm_load_ps(&rdft_wk1i[k2+0]); - const __m128 wk3rv = _mm_load_ps(&rdft_wk3r[k2+0]); - const __m128 wk3iv = _mm_load_ps(&rdft_wk3i[k2+0]); - wk1rv = _mm_load_ps(&rdft_wk1r[k2+0]); + const __m128 wk2rv = _mm_load_ps(&rdft_wk2r[k2 + 0]); + const __m128 wk2iv = _mm_load_ps(&rdft_wk2i[k2 + 0]); + const __m128 wk1iv = _mm_load_ps(&rdft_wk1i[k2 + 0]); + const __m128 wk3rv = _mm_load_ps(&rdft_wk3r[k2 + 0]); + const __m128 wk3iv = _mm_load_ps(&rdft_wk3i[k2 + 0]); + wk1rv = _mm_load_ps(&rdft_wk1r[k2 + 0]); for (j0 = k; j0 < l + k; j0 += 2) { - const __m128i a_00 = _mm_loadl_epi64((__m128i*)&a[j0 + 0]); - const __m128i a_08 = _mm_loadl_epi64((__m128i*)&a[j0 + 8]); + const __m128i a_00 = _mm_loadl_epi64((__m128i*)&a[j0 + 0]); + const __m128i a_08 = _mm_loadl_epi64((__m128i*)&a[j0 + 8]); const __m128i a_32 = _mm_loadl_epi64((__m128i*)&a[j0 + 32]); const __m128i a_40 = _mm_loadl_epi64((__m128i*)&a[j0 + 40]); const __m128 a_00_32 = _mm_shuffle_ps(_mm_castsi128_ps(a_00), _mm_castsi128_ps(a_32), - _MM_SHUFFLE(1, 0, 1 ,0)); + _MM_SHUFFLE(1, 0, 1, 0)); const __m128 a_08_40 = _mm_shuffle_ps(_mm_castsi128_ps(a_08), _mm_castsi128_ps(a_40), - _MM_SHUFFLE(1, 0, 1 ,0)); - __m128 x0r0_0i0_0r1_x0i1 = _mm_add_ps(a_00_32, a_08_40); + _MM_SHUFFLE(1, 0, 1, 0)); + __m128 x0r0_0i0_0r1_x0i1 = _mm_add_ps(a_00_32, a_08_40); const __m128 x1r0_1i0_1r1_x1i1 = _mm_sub_ps(a_00_32, a_08_40); const __m128i a_16 = _mm_loadl_epi64((__m128i*)&a[j0 + 16]); @@ -179,100 +178,102 @@ static void cftmdl_128_SSE2(float *a) { const __m128i a_56 = _mm_loadl_epi64((__m128i*)&a[j0 + 56]); const __m128 a_16_48 = _mm_shuffle_ps(_mm_castsi128_ps(a_16), _mm_castsi128_ps(a_48), - _MM_SHUFFLE(1, 0, 1 ,0)); + _MM_SHUFFLE(1, 0, 1, 0)); const __m128 a_24_56 = _mm_shuffle_ps(_mm_castsi128_ps(a_24), _mm_castsi128_ps(a_56), - _MM_SHUFFLE(1, 0, 1 ,0)); + _MM_SHUFFLE(1, 0, 1, 0)); const __m128 x2r0_2i0_2r1_x2i1 = _mm_add_ps(a_16_48, a_24_56); const __m128 x3r0_3i0_3r1_x3i1 = _mm_sub_ps(a_16_48, a_24_56); const __m128 xx = _mm_add_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); const __m128 xx1 = _mm_sub_ps(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); - const __m128 xx2 = _mm_mul_ps(xx1 , wk2rv); - const __m128 xx3 = _mm_mul_ps(wk2iv, - _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(xx1), - _MM_SHUFFLE(2, 3, 0, 1)))); + const __m128 xx2 = _mm_mul_ps(xx1, wk2rv); + const __m128 xx3 = + _mm_mul_ps(wk2iv, + _mm_castsi128_ps(_mm_shuffle_epi32( + _mm_castps_si128(xx1), _MM_SHUFFLE(2, 3, 0, 1)))); const __m128 xx4 = _mm_add_ps(xx2, xx3); - const __m128 x3i0_3r0_3i1_x3r1 = _mm_castsi128_ps( - 
_mm_shuffle_epi32(_mm_castps_si128(x3r0_3i0_3r1_x3i1), - _MM_SHUFFLE(2, 3, 0, 1))); - const __m128 x3_swapped = _mm_mul_ps(mm_swap_sign, x3i0_3r0_3i1_x3r1); - const __m128 x1_x3_add = _mm_add_ps(x1r0_1i0_1r1_x1i1, x3_swapped); - const __m128 x1_x3_sub = _mm_sub_ps(x1r0_1i0_1r1_x1i1, x3_swapped); + const __m128 x3i0_3r0_3i1_x3r1 = _mm_castsi128_ps(_mm_shuffle_epi32( + _mm_castps_si128(x3r0_3i0_3r1_x3i1), _MM_SHUFFLE(2, 3, 0, 1))); + const __m128 x3_swapped = _mm_mul_ps(mm_swap_sign, x3i0_3r0_3i1_x3r1); + const __m128 x1_x3_add = _mm_add_ps(x1r0_1i0_1r1_x1i1, x3_swapped); + const __m128 x1_x3_sub = _mm_sub_ps(x1r0_1i0_1r1_x1i1, x3_swapped); const __m128 xx10 = _mm_mul_ps(x1_x3_add, wk1rv); - const __m128 xx11 = _mm_mul_ps(wk1iv, + const __m128 xx11 = _mm_mul_ps( + wk1iv, _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(x1_x3_add), _MM_SHUFFLE(2, 3, 0, 1)))); const __m128 xx12 = _mm_add_ps(xx10, xx11); const __m128 xx20 = _mm_mul_ps(x1_x3_sub, wk3rv); - const __m128 xx21 = _mm_mul_ps(wk3iv, + const __m128 xx21 = _mm_mul_ps( + wk3iv, _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(x1_x3_sub), - _MM_SHUFFLE(2, 3, 0, 1)))); + _MM_SHUFFLE(2, 3, 0, 1)))); const __m128 xx22 = _mm_add_ps(xx20, xx21); - _mm_storel_epi64((__m128i*)&a[j0 + 0], _mm_castps_si128(xx)); - _mm_storel_epi64((__m128i*)&a[j0 + 32], - _mm_shuffle_epi32(_mm_castps_si128(xx), - _MM_SHUFFLE(3, 2, 3, 2))); + _mm_storel_epi64((__m128i*)&a[j0 + 0], _mm_castps_si128(xx)); + _mm_storel_epi64( + (__m128i*)&a[j0 + 32], + _mm_shuffle_epi32(_mm_castps_si128(xx), _MM_SHUFFLE(3, 2, 3, 2))); _mm_storel_epi64((__m128i*)&a[j0 + 16], _mm_castps_si128(xx4)); - _mm_storel_epi64((__m128i*)&a[j0 + 48], - _mm_shuffle_epi32(_mm_castps_si128(xx4), - _MM_SHUFFLE(3, 2, 3, 2))); + _mm_storel_epi64( + (__m128i*)&a[j0 + 48], + _mm_shuffle_epi32(_mm_castps_si128(xx4), _MM_SHUFFLE(3, 2, 3, 2))); - _mm_storel_epi64((__m128i*)&a[j0 + 8], _mm_castps_si128(xx12)); - _mm_storel_epi64((__m128i*)&a[j0 + 40], - _mm_shuffle_epi32(_mm_castps_si128(xx12), - _MM_SHUFFLE(3, 2, 3, 2))); + _mm_storel_epi64((__m128i*)&a[j0 + 8], _mm_castps_si128(xx12)); + _mm_storel_epi64( + (__m128i*)&a[j0 + 40], + _mm_shuffle_epi32(_mm_castps_si128(xx12), _MM_SHUFFLE(3, 2, 3, 2))); _mm_storel_epi64((__m128i*)&a[j0 + 24], _mm_castps_si128(xx22)); - _mm_storel_epi64((__m128i*)&a[j0 + 56], - _mm_shuffle_epi32(_mm_castps_si128(xx22), - _MM_SHUFFLE(3, 2, 3, 2))); + _mm_storel_epi64( + (__m128i*)&a[j0 + 56], + _mm_shuffle_epi32(_mm_castps_si128(xx22), _MM_SHUFFLE(3, 2, 3, 2))); } } } -static void rftfsub_128_SSE2(float *a) { - const float *c = rdft_w + 32; +static void rftfsub_128_SSE2(float* a) { + const float* c = rdft_w + 32; int j1, j2, k1, k2; float wkr, wki, xr, xi, yr, yi; - static const ALIGN16_BEG float ALIGN16_END k_half[4] = - {0.5f, 0.5f, 0.5f, 0.5f}; + static const ALIGN16_BEG float ALIGN16_END + k_half[4] = {0.5f, 0.5f, 0.5f, 0.5f}; const __m128 mm_half = _mm_load_ps(k_half); // Vectorized code (four at once). // Note: commented number are indexes for the first iteration of the loop. for (j1 = 1, j2 = 2; j2 + 7 < 64; j1 += 4, j2 += 8) { // Load 'wk'. 
- const __m128 c_j1 = _mm_loadu_ps(&c[ j1]); // 1, 2, 3, 4, - const __m128 c_k1 = _mm_loadu_ps(&c[29 - j1]); // 28, 29, 30, 31, - const __m128 wkrt = _mm_sub_ps(mm_half, c_k1); // 28, 29, 30, 31, + const __m128 c_j1 = _mm_loadu_ps(&c[j1]); // 1, 2, 3, 4, + const __m128 c_k1 = _mm_loadu_ps(&c[29 - j1]); // 28, 29, 30, 31, + const __m128 wkrt = _mm_sub_ps(mm_half, c_k1); // 28, 29, 30, 31, const __m128 wkr_ = - _mm_shuffle_ps(wkrt, wkrt, _MM_SHUFFLE(0, 1, 2, 3)); // 31, 30, 29, 28, - const __m128 wki_ = c_j1; // 1, 2, 3, 4, + _mm_shuffle_ps(wkrt, wkrt, _MM_SHUFFLE(0, 1, 2, 3)); // 31, 30, 29, 28, + const __m128 wki_ = c_j1; // 1, 2, 3, 4, // Load and shuffle 'a'. - const __m128 a_j2_0 = _mm_loadu_ps(&a[0 + j2]); // 2, 3, 4, 5, - const __m128 a_j2_4 = _mm_loadu_ps(&a[4 + j2]); // 6, 7, 8, 9, + const __m128 a_j2_0 = _mm_loadu_ps(&a[0 + j2]); // 2, 3, 4, 5, + const __m128 a_j2_4 = _mm_loadu_ps(&a[4 + j2]); // 6, 7, 8, 9, const __m128 a_k2_0 = _mm_loadu_ps(&a[122 - j2]); // 120, 121, 122, 123, const __m128 a_k2_4 = _mm_loadu_ps(&a[126 - j2]); // 124, 125, 126, 127, - const __m128 a_j2_p0 = _mm_shuffle_ps(a_j2_0, a_j2_4, - _MM_SHUFFLE(2, 0, 2 ,0)); // 2, 4, 6, 8, - const __m128 a_j2_p1 = _mm_shuffle_ps(a_j2_0, a_j2_4, - _MM_SHUFFLE(3, 1, 3 ,1)); // 3, 5, 7, 9, - const __m128 a_k2_p0 = _mm_shuffle_ps(a_k2_4, a_k2_0, - _MM_SHUFFLE(0, 2, 0 ,2)); // 126, 124, 122, 120, - const __m128 a_k2_p1 = _mm_shuffle_ps(a_k2_4, a_k2_0, - _MM_SHUFFLE(1, 3, 1 ,3)); // 127, 125, 123, 121, + const __m128 a_j2_p0 = _mm_shuffle_ps( + a_j2_0, a_j2_4, _MM_SHUFFLE(2, 0, 2, 0)); // 2, 4, 6, 8, + const __m128 a_j2_p1 = _mm_shuffle_ps( + a_j2_0, a_j2_4, _MM_SHUFFLE(3, 1, 3, 1)); // 3, 5, 7, 9, + const __m128 a_k2_p0 = _mm_shuffle_ps( + a_k2_4, a_k2_0, _MM_SHUFFLE(0, 2, 0, 2)); // 126, 124, 122, 120, + const __m128 a_k2_p1 = _mm_shuffle_ps( + a_k2_4, a_k2_0, _MM_SHUFFLE(1, 3, 1, 3)); // 127, 125, 123, 121, // Calculate 'x'. const __m128 xr_ = _mm_sub_ps(a_j2_p0, a_k2_p0); - // 2-126, 4-124, 6-122, 8-120, + // 2-126, 4-124, 6-122, 8-120, const __m128 xi_ = _mm_add_ps(a_j2_p1, a_k2_p1); - // 3-127, 5-125, 7-123, 9-121, + // 3-127, 5-125, 7-123, 9-121, // Calculate product into 'y'. // yr = wkr * xr - wki * xi; // yi = wkr * xi + wki * xr; @@ -280,12 +281,12 @@ static void rftfsub_128_SSE2(float *a) { const __m128 b_ = _mm_mul_ps(wki_, xi_); const __m128 c_ = _mm_mul_ps(wkr_, xi_); const __m128 d_ = _mm_mul_ps(wki_, xr_); - const __m128 yr_ = _mm_sub_ps(a_, b_); // 2-126, 4-124, 6-122, 8-120, - const __m128 yi_ = _mm_add_ps(c_, d_); // 3-127, 5-125, 7-123, 9-121, - // Update 'a'. - // a[j2 + 0] -= yr; - // a[j2 + 1] -= yi; - // a[k2 + 0] += yr; + const __m128 yr_ = _mm_sub_ps(a_, b_); // 2-126, 4-124, 6-122, 8-120, + const __m128 yi_ = _mm_add_ps(c_, d_); // 3-127, 5-125, 7-123, 9-121, + // Update 'a'. + // a[j2 + 0] -= yr; + // a[j2 + 1] -= yi; + // a[k2 + 0] += yr; // a[k2 + 1] -= yi; const __m128 a_j2_p0n = _mm_sub_ps(a_j2_p0, yr_); // 2, 4, 6, 8, const __m128 a_j2_p1n = _mm_sub_ps(a_j2_p1, yi_); // 3, 5, 7, 9, @@ -293,26 +294,26 @@ static void rftfsub_128_SSE2(float *a) { const __m128 a_k2_p1n = _mm_sub_ps(a_k2_p1, yi_); // 127, 125, 123, 121, // Shuffle in right order and store. 
const __m128 a_j2_0n = _mm_unpacklo_ps(a_j2_p0n, a_j2_p1n); - // 2, 3, 4, 5, + // 2, 3, 4, 5, const __m128 a_j2_4n = _mm_unpackhi_ps(a_j2_p0n, a_j2_p1n); - // 6, 7, 8, 9, + // 6, 7, 8, 9, const __m128 a_k2_0nt = _mm_unpackhi_ps(a_k2_p0n, a_k2_p1n); - // 122, 123, 120, 121, + // 122, 123, 120, 121, const __m128 a_k2_4nt = _mm_unpacklo_ps(a_k2_p0n, a_k2_p1n); - // 126, 127, 124, 125, - const __m128 a_k2_0n = _mm_shuffle_ps(a_k2_0nt, a_k2_0nt, - _MM_SHUFFLE(1, 0, 3 ,2)); // 120, 121, 122, 123, - const __m128 a_k2_4n = _mm_shuffle_ps(a_k2_4nt, a_k2_4nt, - _MM_SHUFFLE(1, 0, 3 ,2)); // 124, 125, 126, 127, - _mm_storeu_ps(&a[0 + j2], a_j2_0n); - _mm_storeu_ps(&a[4 + j2], a_j2_4n); + // 126, 127, 124, 125, + const __m128 a_k2_0n = _mm_shuffle_ps( + a_k2_0nt, a_k2_0nt, _MM_SHUFFLE(1, 0, 3, 2)); // 120, 121, 122, 123, + const __m128 a_k2_4n = _mm_shuffle_ps( + a_k2_4nt, a_k2_4nt, _MM_SHUFFLE(1, 0, 3, 2)); // 124, 125, 126, 127, + _mm_storeu_ps(&a[0 + j2], a_j2_0n); + _mm_storeu_ps(&a[4 + j2], a_j2_4n); _mm_storeu_ps(&a[122 - j2], a_k2_0n); _mm_storeu_ps(&a[126 - j2], a_k2_4n); } // Scalar code for the remaining items. for (; j2 < 64; j1 += 1, j2 += 2) { k2 = 128 - j2; - k1 = 32 - j1; + k1 = 32 - j1; wkr = 0.5f - c[k1]; wki = c[j1]; xr = a[j2 + 0] - a[k2 + 0]; @@ -326,13 +327,13 @@ static void rftfsub_128_SSE2(float *a) { } } -static void rftbsub_128_SSE2(float *a) { - const float *c = rdft_w + 32; +static void rftbsub_128_SSE2(float* a) { + const float* c = rdft_w + 32; int j1, j2, k1, k2; float wkr, wki, xr, xi, yr, yi; - static const ALIGN16_BEG float ALIGN16_END k_half[4] = - {0.5f, 0.5f, 0.5f, 0.5f}; + static const ALIGN16_BEG float ALIGN16_END + k_half[4] = {0.5f, 0.5f, 0.5f, 0.5f}; const __m128 mm_half = _mm_load_ps(k_half); a[1] = -a[1]; @@ -340,30 +341,30 @@ static void rftbsub_128_SSE2(float *a) { // Note: commented number are indexes for the first iteration of the loop. for (j1 = 1, j2 = 2; j2 + 7 < 64; j1 += 4, j2 += 8) { // Load 'wk'. - const __m128 c_j1 = _mm_loadu_ps(&c[ j1]); // 1, 2, 3, 4, - const __m128 c_k1 = _mm_loadu_ps(&c[29 - j1]); // 28, 29, 30, 31, - const __m128 wkrt = _mm_sub_ps(mm_half, c_k1); // 28, 29, 30, 31, + const __m128 c_j1 = _mm_loadu_ps(&c[j1]); // 1, 2, 3, 4, + const __m128 c_k1 = _mm_loadu_ps(&c[29 - j1]); // 28, 29, 30, 31, + const __m128 wkrt = _mm_sub_ps(mm_half, c_k1); // 28, 29, 30, 31, const __m128 wkr_ = - _mm_shuffle_ps(wkrt, wkrt, _MM_SHUFFLE(0, 1, 2, 3)); // 31, 30, 29, 28, - const __m128 wki_ = c_j1; // 1, 2, 3, 4, + _mm_shuffle_ps(wkrt, wkrt, _MM_SHUFFLE(0, 1, 2, 3)); // 31, 30, 29, 28, + const __m128 wki_ = c_j1; // 1, 2, 3, 4, // Load and shuffle 'a'. 
- const __m128 a_j2_0 = _mm_loadu_ps(&a[0 + j2]); // 2, 3, 4, 5, - const __m128 a_j2_4 = _mm_loadu_ps(&a[4 + j2]); // 6, 7, 8, 9, + const __m128 a_j2_0 = _mm_loadu_ps(&a[0 + j2]); // 2, 3, 4, 5, + const __m128 a_j2_4 = _mm_loadu_ps(&a[4 + j2]); // 6, 7, 8, 9, const __m128 a_k2_0 = _mm_loadu_ps(&a[122 - j2]); // 120, 121, 122, 123, const __m128 a_k2_4 = _mm_loadu_ps(&a[126 - j2]); // 124, 125, 126, 127, - const __m128 a_j2_p0 = _mm_shuffle_ps(a_j2_0, a_j2_4, - _MM_SHUFFLE(2, 0, 2 ,0)); // 2, 4, 6, 8, - const __m128 a_j2_p1 = _mm_shuffle_ps(a_j2_0, a_j2_4, - _MM_SHUFFLE(3, 1, 3 ,1)); // 3, 5, 7, 9, - const __m128 a_k2_p0 = _mm_shuffle_ps(a_k2_4, a_k2_0, - _MM_SHUFFLE(0, 2, 0 ,2)); // 126, 124, 122, 120, - const __m128 a_k2_p1 = _mm_shuffle_ps(a_k2_4, a_k2_0, - _MM_SHUFFLE(1, 3, 1 ,3)); // 127, 125, 123, 121, + const __m128 a_j2_p0 = _mm_shuffle_ps( + a_j2_0, a_j2_4, _MM_SHUFFLE(2, 0, 2, 0)); // 2, 4, 6, 8, + const __m128 a_j2_p1 = _mm_shuffle_ps( + a_j2_0, a_j2_4, _MM_SHUFFLE(3, 1, 3, 1)); // 3, 5, 7, 9, + const __m128 a_k2_p0 = _mm_shuffle_ps( + a_k2_4, a_k2_0, _MM_SHUFFLE(0, 2, 0, 2)); // 126, 124, 122, 120, + const __m128 a_k2_p1 = _mm_shuffle_ps( + a_k2_4, a_k2_0, _MM_SHUFFLE(1, 3, 1, 3)); // 127, 125, 123, 121, // Calculate 'x'. const __m128 xr_ = _mm_sub_ps(a_j2_p0, a_k2_p0); - // 2-126, 4-124, 6-122, 8-120, + // 2-126, 4-124, 6-122, 8-120, const __m128 xi_ = _mm_add_ps(a_j2_p1, a_k2_p1); - // 3-127, 5-125, 7-123, 9-121, + // 3-127, 5-125, 7-123, 9-121, // Calculate product into 'y'. // yr = wkr * xr + wki * xi; // yi = wkr * xi - wki * xr; @@ -371,12 +372,12 @@ static void rftbsub_128_SSE2(float *a) { const __m128 b_ = _mm_mul_ps(wki_, xi_); const __m128 c_ = _mm_mul_ps(wkr_, xi_); const __m128 d_ = _mm_mul_ps(wki_, xr_); - const __m128 yr_ = _mm_add_ps(a_, b_); // 2-126, 4-124, 6-122, 8-120, - const __m128 yi_ = _mm_sub_ps(c_, d_); // 3-127, 5-125, 7-123, 9-121, - // Update 'a'. - // a[j2 + 0] = a[j2 + 0] - yr; - // a[j2 + 1] = yi - a[j2 + 1]; - // a[k2 + 0] = yr + a[k2 + 0]; + const __m128 yr_ = _mm_add_ps(a_, b_); // 2-126, 4-124, 6-122, 8-120, + const __m128 yi_ = _mm_sub_ps(c_, d_); // 3-127, 5-125, 7-123, 9-121, + // Update 'a'. + // a[j2 + 0] = a[j2 + 0] - yr; + // a[j2 + 1] = yi - a[j2 + 1]; + // a[k2 + 0] = yr + a[k2 + 0]; // a[k2 + 1] = yi - a[k2 + 1]; const __m128 a_j2_p0n = _mm_sub_ps(a_j2_p0, yr_); // 2, 4, 6, 8, const __m128 a_j2_p1n = _mm_sub_ps(yi_, a_j2_p1); // 3, 5, 7, 9, @@ -384,26 +385,26 @@ static void rftbsub_128_SSE2(float *a) { const __m128 a_k2_p1n = _mm_sub_ps(yi_, a_k2_p1); // 127, 125, 123, 121, // Shuffle in right order and store. 
const __m128 a_j2_0n = _mm_unpacklo_ps(a_j2_p0n, a_j2_p1n); - // 2, 3, 4, 5, + // 2, 3, 4, 5, const __m128 a_j2_4n = _mm_unpackhi_ps(a_j2_p0n, a_j2_p1n); - // 6, 7, 8, 9, + // 6, 7, 8, 9, const __m128 a_k2_0nt = _mm_unpackhi_ps(a_k2_p0n, a_k2_p1n); - // 122, 123, 120, 121, + // 122, 123, 120, 121, const __m128 a_k2_4nt = _mm_unpacklo_ps(a_k2_p0n, a_k2_p1n); - // 126, 127, 124, 125, - const __m128 a_k2_0n = _mm_shuffle_ps(a_k2_0nt, a_k2_0nt, - _MM_SHUFFLE(1, 0, 3 ,2)); // 120, 121, 122, 123, - const __m128 a_k2_4n = _mm_shuffle_ps(a_k2_4nt, a_k2_4nt, - _MM_SHUFFLE(1, 0, 3 ,2)); // 124, 125, 126, 127, - _mm_storeu_ps(&a[0 + j2], a_j2_0n); - _mm_storeu_ps(&a[4 + j2], a_j2_4n); + // 126, 127, 124, 125, + const __m128 a_k2_0n = _mm_shuffle_ps( + a_k2_0nt, a_k2_0nt, _MM_SHUFFLE(1, 0, 3, 2)); // 120, 121, 122, 123, + const __m128 a_k2_4n = _mm_shuffle_ps( + a_k2_4nt, a_k2_4nt, _MM_SHUFFLE(1, 0, 3, 2)); // 124, 125, 126, 127, + _mm_storeu_ps(&a[0 + j2], a_j2_0n); + _mm_storeu_ps(&a[4 + j2], a_j2_4n); _mm_storeu_ps(&a[122 - j2], a_k2_0n); _mm_storeu_ps(&a[126 - j2], a_k2_4n); } // Scalar code for the remaining items. for (; j2 < 64; j1 += 1, j2 += 2) { k2 = 128 - j2; - k1 = 32 - j1; + k1 = 32 - j1; wkr = 0.5f - c[k1]; wki = c[j1]; xr = a[j2 + 0] - a[k2 + 0]; diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.c index 13521ec78c34..5382665e10d8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.c @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -/* Resamples a signal to an arbitrary rate. Used by the AEC to compensate for clock - * skew by resampling the farend signal. +/* Resamples a signal to an arbitrary rate. Used by the AEC to compensate for + * clock skew by resampling the farend signal. 
*/ #include "webrtc/modules/audio_processing/aec/aec_resampler.h" @@ -21,214 +21,205 @@ #include "webrtc/modules/audio_processing/aec/aec_core.h" -enum { kEstimateLengthFrames = 400 }; +enum { + kEstimateLengthFrames = 400 +}; typedef struct { - short buffer[kResamplerBufferSize]; - float position; + short buffer[kResamplerBufferSize]; + float position; - int deviceSampleRateHz; - int skewData[kEstimateLengthFrames]; - int skewDataIndex; - float skewEstimate; + int deviceSampleRateHz; + int skewData[kEstimateLengthFrames]; + int skewDataIndex; + float skewEstimate; } resampler_t; static int EstimateSkew(const int* rawSkew, int size, int absLimit, - float *skewEst); + float* skewEst); -int WebRtcAec_CreateResampler(void **resampInst) -{ - resampler_t *obj = malloc(sizeof(resampler_t)); - *resampInst = obj; - if (obj == NULL) { - return -1; - } +int WebRtcAec_CreateResampler(void** resampInst) { + resampler_t* obj = malloc(sizeof(resampler_t)); + *resampInst = obj; + if (obj == NULL) { + return -1; + } - return 0; + return 0; } -int WebRtcAec_InitResampler(void *resampInst, int deviceSampleRateHz) -{ - resampler_t *obj = (resampler_t*) resampInst; - memset(obj->buffer, 0, sizeof(obj->buffer)); - obj->position = 0.0; +int WebRtcAec_InitResampler(void* resampInst, int deviceSampleRateHz) { + resampler_t* obj = (resampler_t*)resampInst; + memset(obj->buffer, 0, sizeof(obj->buffer)); + obj->position = 0.0; - obj->deviceSampleRateHz = deviceSampleRateHz; - memset(obj->skewData, 0, sizeof(obj->skewData)); - obj->skewDataIndex = 0; - obj->skewEstimate = 0.0; + obj->deviceSampleRateHz = deviceSampleRateHz; + memset(obj->skewData, 0, sizeof(obj->skewData)); + obj->skewDataIndex = 0; + obj->skewEstimate = 0.0; - return 0; + return 0; } -int WebRtcAec_FreeResampler(void *resampInst) -{ - resampler_t *obj = (resampler_t*) resampInst; - free(obj); +int WebRtcAec_FreeResampler(void* resampInst) { + resampler_t* obj = (resampler_t*)resampInst; + free(obj); - return 0; + return 0; } -void WebRtcAec_ResampleLinear(void *resampInst, - const short *inspeech, +void WebRtcAec_ResampleLinear(void* resampInst, + const short* inspeech, int size, float skew, - short *outspeech, - int *size_out) -{ - resampler_t *obj = (resampler_t*) resampInst; + short* outspeech, + int* size_out) { + resampler_t* obj = (resampler_t*)resampInst; - short *y; - float be, tnew, interp; - int tn, mm; + short* y; + float be, tnew, interp; + int tn, mm; - assert(!(size < 0 || size > 2 * FRAME_LEN)); - assert(resampInst != NULL); - assert(inspeech != NULL); - assert(outspeech != NULL); - assert(size_out != NULL); + assert(!(size < 0 || size > 2 * FRAME_LEN)); + assert(resampInst != NULL); + assert(inspeech != NULL); + assert(outspeech != NULL); + assert(size_out != NULL); - // Add new frame data in lookahead - memcpy(&obj->buffer[FRAME_LEN + kResamplingDelay], - inspeech, - size * sizeof(short)); + // Add new frame data in lookahead + memcpy(&obj->buffer[FRAME_LEN + kResamplingDelay], + inspeech, + size * sizeof(short)); - // Sample rate ratio - be = 1 + skew; + // Sample rate ratio + be = 1 + skew; - // Loop over input frame - mm = 0; - y = &obj->buffer[FRAME_LEN]; // Point at current frame + // Loop over input frame + mm = 0; + y = &obj->buffer[FRAME_LEN]; // Point at current frame + + tnew = be * mm + obj->position; + tn = (int)tnew; + + while (tn < size) { + + // Interpolation + interp = y[tn] + (tnew - tn) * (y[tn + 1] - y[tn]); + + if (interp > 32767) { + interp = 32767; + } else if (interp < -32768) { + interp = -32768; + } + + 
outspeech[mm] = (short)interp; + mm++; tnew = be * mm + obj->position; - tn = (int) tnew; + tn = (int)tnew; + } - while (tn < size) { + *size_out = mm; + obj->position += (*size_out) * be - size; - // Interpolation - interp = y[tn] + (tnew - tn) * (y[tn+1] - y[tn]); - - if (interp > 32767) { - interp = 32767; - } - else if (interp < -32768) { - interp = -32768; - } - - outspeech[mm] = (short) interp; - mm++; - - tnew = be * mm + obj->position; - tn = (int) tnew; - } - - *size_out = mm; - obj->position += (*size_out) * be - size; - - // Shift buffer - memmove(obj->buffer, - &obj->buffer[size], - (kResamplerBufferSize - size) * sizeof(short)); + // Shift buffer + memmove(obj->buffer, + &obj->buffer[size], + (kResamplerBufferSize - size) * sizeof(short)); } -int WebRtcAec_GetSkew(void *resampInst, int rawSkew, float *skewEst) -{ - resampler_t *obj = (resampler_t*)resampInst; - int err = 0; +int WebRtcAec_GetSkew(void* resampInst, int rawSkew, float* skewEst) { + resampler_t* obj = (resampler_t*)resampInst; + int err = 0; - if (obj->skewDataIndex < kEstimateLengthFrames) { - obj->skewData[obj->skewDataIndex] = rawSkew; - obj->skewDataIndex++; - } - else if (obj->skewDataIndex == kEstimateLengthFrames) { - err = EstimateSkew(obj->skewData, - kEstimateLengthFrames, - obj->deviceSampleRateHz, - skewEst); - obj->skewEstimate = *skewEst; - obj->skewDataIndex++; - } - else { - *skewEst = obj->skewEstimate; - } + if (obj->skewDataIndex < kEstimateLengthFrames) { + obj->skewData[obj->skewDataIndex] = rawSkew; + obj->skewDataIndex++; + } else if (obj->skewDataIndex == kEstimateLengthFrames) { + err = EstimateSkew( + obj->skewData, kEstimateLengthFrames, obj->deviceSampleRateHz, skewEst); + obj->skewEstimate = *skewEst; + obj->skewDataIndex++; + } else { + *skewEst = obj->skewEstimate; + } - return err; + return err; } int EstimateSkew(const int* rawSkew, int size, int deviceSampleRateHz, - float *skewEst) -{ - const int absLimitOuter = (int)(0.04f * deviceSampleRateHz); - const int absLimitInner = (int)(0.0025f * deviceSampleRateHz); - int i = 0; - int n = 0; - float rawAvg = 0; - float err = 0; - float rawAbsDev = 0; - int upperLimit = 0; - int lowerLimit = 0; - float cumSum = 0; - float x = 0; - float x2 = 0; - float y = 0; - float xy = 0; - float xAvg = 0; - float denom = 0; - float skew = 0; + float* skewEst) { + const int absLimitOuter = (int)(0.04f * deviceSampleRateHz); + const int absLimitInner = (int)(0.0025f * deviceSampleRateHz); + int i = 0; + int n = 0; + float rawAvg = 0; + float err = 0; + float rawAbsDev = 0; + int upperLimit = 0; + int lowerLimit = 0; + float cumSum = 0; + float x = 0; + float x2 = 0; + float y = 0; + float xy = 0; + float xAvg = 0; + float denom = 0; + float skew = 0; - *skewEst = 0; // Set in case of error below. - for (i = 0; i < size; i++) { - if ((rawSkew[i] < absLimitOuter && rawSkew[i] > -absLimitOuter)) { - n++; - rawAvg += rawSkew[i]; - } + *skewEst = 0; // Set in case of error below. + for (i = 0; i < size; i++) { + if ((rawSkew[i] < absLimitOuter && rawSkew[i] > -absLimitOuter)) { + n++; + rawAvg += rawSkew[i]; } + } - if (n == 0) { - return -1; + if (n == 0) { + return -1; + } + assert(n > 0); + rawAvg /= n; + + for (i = 0; i < size; i++) { + if ((rawSkew[i] < absLimitOuter && rawSkew[i] > -absLimitOuter)) { + err = rawSkew[i] - rawAvg; + rawAbsDev += err >= 0 ? err : -err; } - assert(n > 0); - rawAvg /= n; + } + assert(n > 0); + rawAbsDev /= n; + upperLimit = (int)(rawAvg + 5 * rawAbsDev + 1); // +1 for ceiling. 
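
For context, the EstimateSkew() rewrite is likewise format-only. The function fits an ordinary least-squares line through the cumulative sum of the accepted raw-skew samples, after two outlier-rejection passes (the absLimitOuter gate and the rawAvg +/- 5 * rawAbsDev limits computed here); the slope of that line is the skew estimate. A hedged scalar sketch of just the final fit, with the filtering elided and the helper name invented for illustration:

    /* Least-squares slope of cumsum(samples) against the sample index,
     * matching the x/x2/y/xy accumulation in EstimateSkew(). Returns 0
     * if the fit is degenerate. */
    static float SlopeOfCumSum(const int* samples, int size) {
      float cumSum = 0, x = 0, x2 = 0, y = 0, xy = 0;
      float xAvg, denom;
      int n = 0;
      int i;
      for (i = 0; i < size; i++) {
        n++;
        cumSum += samples[i];
        x += n;              /* sum of indices */
        x2 += (float)n * n;  /* sum of squared indices */
        y += cumSum;         /* sum of cumulative values */
        xy += n * cumSum;    /* cross term */
      }
      if (n == 0) {
        return 0.0f;
      }
      xAvg = x / n;
      denom = x2 - xAvg * x;
      return denom != 0 ? (xy - xAvg * y) / denom : 0.0f;
    }
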
+ lowerLimit = (int)(rawAvg - 5 * rawAbsDev - 1); // -1 for floor. - for (i = 0; i < size; i++) { - if ((rawSkew[i] < absLimitOuter && rawSkew[i] > -absLimitOuter)) { - err = rawSkew[i] - rawAvg; - rawAbsDev += err >= 0 ? err : -err; - } + n = 0; + for (i = 0; i < size; i++) { + if ((rawSkew[i] < absLimitInner && rawSkew[i] > -absLimitInner) || + (rawSkew[i] < upperLimit && rawSkew[i] > lowerLimit)) { + n++; + cumSum += rawSkew[i]; + x += n; + x2 += n * n; + y += cumSum; + xy += n * cumSum; } - assert(n > 0); - rawAbsDev /= n; - upperLimit = (int)(rawAvg + 5 * rawAbsDev + 1); // +1 for ceiling. - lowerLimit = (int)(rawAvg - 5 * rawAbsDev - 1); // -1 for floor. + } - n = 0; - for (i = 0; i < size; i++) { - if ((rawSkew[i] < absLimitInner && rawSkew[i] > -absLimitInner) || - (rawSkew[i] < upperLimit && rawSkew[i] > lowerLimit)) { - n++; - cumSum += rawSkew[i]; - x += n; - x2 += n*n; - y += cumSum; - xy += n * cumSum; - } - } + if (n == 0) { + return -1; + } + assert(n > 0); + xAvg = x / n; + denom = x2 - xAvg * x; - if (n == 0) { - return -1; - } - assert(n > 0); - xAvg = x / n; - denom = x2 - xAvg*x; + if (denom != 0) { + skew = (xy - xAvg * y) / denom; + } - if (denom != 0) { - skew = (xy - xAvg*y) / denom; - } - - *skewEst = skew; - return 0; + *skewEst = skew; + return 0; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.h index 3cd0691e0101..e42c056f6589 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_resampler.h @@ -13,23 +13,27 @@ #include "webrtc/modules/audio_processing/aec/aec_core.h" -enum { kResamplingDelay = 1 }; -enum { kResamplerBufferSize = FRAME_LEN * 4 }; +enum { + kResamplingDelay = 1 +}; +enum { + kResamplerBufferSize = FRAME_LEN * 4 +}; // Unless otherwise specified, functions return 0 on success and -1 on error -int WebRtcAec_CreateResampler(void **resampInst); -int WebRtcAec_InitResampler(void *resampInst, int deviceSampleRateHz); -int WebRtcAec_FreeResampler(void *resampInst); +int WebRtcAec_CreateResampler(void** resampInst); +int WebRtcAec_InitResampler(void* resampInst, int deviceSampleRateHz); +int WebRtcAec_FreeResampler(void* resampInst); // Estimates skew from raw measurement. -int WebRtcAec_GetSkew(void *resampInst, int rawSkew, float *skewEst); +int WebRtcAec_GetSkew(void* resampInst, int rawSkew, float* skewEst); // Resamples input using linear interpolation. -void WebRtcAec_ResampleLinear(void *resampInst, - const short *inspeech, +void WebRtcAec_ResampleLinear(void* resampInst, + const short* inspeech, int size, float skew, - short *outspeech, - int *size_out); + short* outspeech, + int* size_out); #endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_RESAMPLER_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c index 27da67de515c..bbdd5f628b23 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c @@ -66,20 +66,23 @@ // GTP/Linux(ChromeOS): TBD, but for the moment we will trust the values. #if defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_MAC) #define WEBRTC_UNTRUSTED_DELAY + +#if defined(WEBRTC_MAC) +static const int kDelayDiffOffsetSamples = -160; +#else +// Not enabled for now. 
+static const int kDelayDiffOffsetSamples = 0; +#endif #endif #if defined(WEBRTC_MAC) static const int kFixedDelayMs = 20; -static const int kDelayDiffOffsetSamples = -160; -#elif defined(WEBRTC_WIN) -static const int kFixedDelayMs = 50; -static const int kDelayDiffOffsetSamples = 0; #else -// Essentially ChromeOS. static const int kFixedDelayMs = 50; -static const int kDelayDiffOffsetSamples = 0; #endif +#if !defined(WEBRTC_UNTRUSTED_DELAY) static const int kMinTrustedDelayMs = 20; +#endif static const int kMaxTrustedDelayMs = 500; // Maximum length of resampled signal. Must be an integer multiple of frames @@ -89,7 +92,7 @@ static const int kMaxTrustedDelayMs = 500; #define MAX_RESAMP_LEN (5 * FRAME_LEN) static const int kMaxBufSizeStart = 62; // In partitions -static const int sampMsNb = 8; // samples per ms in nb +static const int sampMsNb = 8; // samples per ms in nb static const int initCheck = 42; #ifdef WEBRTC_AEC_DEBUG_DUMP @@ -98,334 +101,351 @@ int webrtc_aec_instance_count = 0; // Estimates delay to set the position of the far-end buffer read pointer // (controlled by knownDelay) -static void EstBufDelayNormal(aecpc_t *aecInst); -static void EstBufDelayExtended(aecpc_t *aecInst); -static int ProcessNormal(aecpc_t* self, const int16_t* near, - const int16_t* near_high, int16_t* out, int16_t* out_high, - int16_t num_samples, int16_t reported_delay_ms, int32_t skew); -static void ProcessExtended(aecpc_t* self, const int16_t* near, - const int16_t* near_high, int16_t* out, int16_t* out_high, - int16_t num_samples, int16_t reported_delay_ms, int32_t skew); +static void EstBufDelayNormal(aecpc_t* aecInst); +static void EstBufDelayExtended(aecpc_t* aecInst); +static int ProcessNormal(aecpc_t* self, + const int16_t* near, + const int16_t* near_high, + int16_t* out, + int16_t* out_high, + int16_t num_samples, + int16_t reported_delay_ms, + int32_t skew); +static void ProcessExtended(aecpc_t* self, + const int16_t* near, + const int16_t* near_high, + int16_t* out, + int16_t* out_high, + int16_t num_samples, + int16_t reported_delay_ms, + int32_t skew); -int32_t WebRtcAec_Create(void **aecInst) -{ - aecpc_t *aecpc; - if (aecInst == NULL) { - return -1; - } +int32_t WebRtcAec_Create(void** aecInst) { + aecpc_t* aecpc; + if (aecInst == NULL) { + return -1; + } - aecpc = malloc(sizeof(aecpc_t)); - *aecInst = aecpc; - if (aecpc == NULL) { - return -1; - } + aecpc = malloc(sizeof(aecpc_t)); + *aecInst = aecpc; + if (aecpc == NULL) { + return -1; + } - if (WebRtcAec_CreateAec(&aecpc->aec) == -1) { - WebRtcAec_Free(aecpc); - aecpc = NULL; - return -1; - } + if (WebRtcAec_CreateAec(&aecpc->aec) == -1) { + WebRtcAec_Free(aecpc); + aecpc = NULL; + return -1; + } - if (WebRtcAec_CreateResampler(&aecpc->resampler) == -1) { - WebRtcAec_Free(aecpc); - aecpc = NULL; - return -1; - } - // Create far-end pre-buffer. The buffer size has to be large enough for - // largest possible drift compensation (kResamplerBufferSize) + "almost" an - // FFT buffer (PART_LEN2 - 1). - aecpc->far_pre_buf = WebRtc_CreateBuffer(PART_LEN2 + kResamplerBufferSize, - sizeof(float)); - if (!aecpc->far_pre_buf) { - WebRtcAec_Free(aecpc); - aecpc = NULL; - return -1; - } + if (WebRtcAec_CreateResampler(&aecpc->resampler) == -1) { + WebRtcAec_Free(aecpc); + aecpc = NULL; + return -1; + } + // Create far-end pre-buffer. The buffer size has to be large enough for + // largest possible drift compensation (kResamplerBufferSize) + "almost" an + // FFT buffer (PART_LEN2 - 1). 
+ aecpc->far_pre_buf = + WebRtc_CreateBuffer(PART_LEN2 + kResamplerBufferSize, sizeof(float)); + if (!aecpc->far_pre_buf) { + WebRtcAec_Free(aecpc); + aecpc = NULL; + return -1; + } - aecpc->initFlag = 0; - aecpc->lastError = 0; + aecpc->initFlag = 0; + aecpc->lastError = 0; #ifdef WEBRTC_AEC_DEBUG_DUMP - aecpc->far_pre_buf_s16 = WebRtc_CreateBuffer( - PART_LEN2 + kResamplerBufferSize, sizeof(int16_t)); - if (!aecpc->far_pre_buf_s16) { - WebRtcAec_Free(aecpc); - aecpc = NULL; - return -1; - } - { - char filename[64]; - sprintf(filename, "aec_buf%d.dat", webrtc_aec_instance_count); - aecpc->bufFile = fopen(filename, "wb"); - sprintf(filename, "aec_skew%d.dat", webrtc_aec_instance_count); - aecpc->skewFile = fopen(filename, "wb"); - sprintf(filename, "aec_delay%d.dat", webrtc_aec_instance_count); - aecpc->delayFile = fopen(filename, "wb"); - webrtc_aec_instance_count++; - } + aecpc->far_pre_buf_s16 = + WebRtc_CreateBuffer(PART_LEN2 + kResamplerBufferSize, sizeof(int16_t)); + if (!aecpc->far_pre_buf_s16) { + WebRtcAec_Free(aecpc); + aecpc = NULL; + return -1; + } + { + char filename[64]; + sprintf(filename, "aec_buf%d.dat", webrtc_aec_instance_count); + aecpc->bufFile = fopen(filename, "wb"); + sprintf(filename, "aec_skew%d.dat", webrtc_aec_instance_count); + aecpc->skewFile = fopen(filename, "wb"); + sprintf(filename, "aec_delay%d.dat", webrtc_aec_instance_count); + aecpc->delayFile = fopen(filename, "wb"); + webrtc_aec_instance_count++; + } #endif - return 0; + return 0; } -int32_t WebRtcAec_Free(void *aecInst) -{ - aecpc_t *aecpc = aecInst; +int32_t WebRtcAec_Free(void* aecInst) { + aecpc_t* aecpc = aecInst; - if (aecpc == NULL) { - return -1; - } + if (aecpc == NULL) { + return -1; + } - WebRtc_FreeBuffer(aecpc->far_pre_buf); + WebRtc_FreeBuffer(aecpc->far_pre_buf); #ifdef WEBRTC_AEC_DEBUG_DUMP - WebRtc_FreeBuffer(aecpc->far_pre_buf_s16); - fclose(aecpc->bufFile); - fclose(aecpc->skewFile); - fclose(aecpc->delayFile); + WebRtc_FreeBuffer(aecpc->far_pre_buf_s16); + fclose(aecpc->bufFile); + fclose(aecpc->skewFile); + fclose(aecpc->delayFile); #endif - WebRtcAec_FreeAec(aecpc->aec); - WebRtcAec_FreeResampler(aecpc->resampler); - free(aecpc); + WebRtcAec_FreeAec(aecpc->aec); + WebRtcAec_FreeResampler(aecpc->resampler); + free(aecpc); - return 0; + return 0; } -int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq) -{ - aecpc_t *aecpc = aecInst; - AecConfig aecConfig; +int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) { + aecpc_t* aecpc = aecInst; + AecConfig aecConfig; - if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000) { - aecpc->lastError = AEC_BAD_PARAMETER_ERROR; - return -1; - } - aecpc->sampFreq = sampFreq; + if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000) { + aecpc->lastError = AEC_BAD_PARAMETER_ERROR; + return -1; + } + aecpc->sampFreq = sampFreq; - if (scSampFreq < 1 || scSampFreq > 96000) { - aecpc->lastError = AEC_BAD_PARAMETER_ERROR; - return -1; - } - aecpc->scSampFreq = scSampFreq; + if (scSampFreq < 1 || scSampFreq > 96000) { + aecpc->lastError = AEC_BAD_PARAMETER_ERROR; + return -1; + } + aecpc->scSampFreq = scSampFreq; - // Initialize echo canceller core - if (WebRtcAec_InitAec(aecpc->aec, aecpc->sampFreq) == -1) { - aecpc->lastError = AEC_UNSPECIFIED_ERROR; - return -1; - } + // Initialize echo canceller core + if (WebRtcAec_InitAec(aecpc->aec, aecpc->sampFreq) == -1) { + aecpc->lastError = AEC_UNSPECIFIED_ERROR; + return -1; + } - if (WebRtcAec_InitResampler(aecpc->resampler, aecpc->scSampFreq) == 
-1) { - aecpc->lastError = AEC_UNSPECIFIED_ERROR; - return -1; - } + if (WebRtcAec_InitResampler(aecpc->resampler, aecpc->scSampFreq) == -1) { + aecpc->lastError = AEC_UNSPECIFIED_ERROR; + return -1; + } - if (WebRtc_InitBuffer(aecpc->far_pre_buf) == -1) { - aecpc->lastError = AEC_UNSPECIFIED_ERROR; - return -1; - } - WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN); // Start overlap. + if (WebRtc_InitBuffer(aecpc->far_pre_buf) == -1) { + aecpc->lastError = AEC_UNSPECIFIED_ERROR; + return -1; + } + WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN); // Start overlap. - aecpc->initFlag = initCheck; // indicates that initialization has been done + aecpc->initFlag = initCheck; // indicates that initialization has been done - if (aecpc->sampFreq == 32000) { - aecpc->splitSampFreq = 16000; - } - else { - aecpc->splitSampFreq = sampFreq; - } + if (aecpc->sampFreq == 32000) { + aecpc->splitSampFreq = 16000; + } else { + aecpc->splitSampFreq = sampFreq; + } - aecpc->delayCtr = 0; - aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq; - // Sampling frequency multiplier (SWB is processed as 160 frame size). - aecpc->rate_factor = aecpc->splitSampFreq / 8000; + aecpc->delayCtr = 0; + aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq; + // Sampling frequency multiplier (SWB is processed as 160 frame size). + aecpc->rate_factor = aecpc->splitSampFreq / 8000; - aecpc->sum = 0; - aecpc->counter = 0; - aecpc->checkBuffSize = 1; - aecpc->firstVal = 0; + aecpc->sum = 0; + aecpc->counter = 0; + aecpc->checkBuffSize = 1; + aecpc->firstVal = 0; - aecpc->startup_phase = 1; - aecpc->bufSizeStart = 0; - aecpc->checkBufSizeCtr = 0; - aecpc->msInSndCardBuf = 0; - aecpc->filtDelay = -1; // -1 indicates an initialized state. - aecpc->timeForDelayChange = 0; - aecpc->knownDelay = 0; - aecpc->lastDelayDiff = 0; + aecpc->startup_phase = 1; + aecpc->bufSizeStart = 0; + aecpc->checkBufSizeCtr = 0; + aecpc->msInSndCardBuf = 0; + aecpc->filtDelay = -1; // -1 indicates an initialized state. + aecpc->timeForDelayChange = 0; + aecpc->knownDelay = 0; + aecpc->lastDelayDiff = 0; - aecpc->skewFrCtr = 0; - aecpc->resample = kAecFalse; - aecpc->highSkewCtr = 0; - aecpc->skew = 0; + aecpc->skewFrCtr = 0; + aecpc->resample = kAecFalse; + aecpc->highSkewCtr = 0; + aecpc->skew = 0; - aecpc->farend_started = 0; + aecpc->farend_started = 0; - // Default settings. - aecConfig.nlpMode = kAecNlpModerate; - aecConfig.skewMode = kAecFalse; - aecConfig.metricsMode = kAecFalse; - aecConfig.delay_logging = kAecFalse; + // Default settings. + aecConfig.nlpMode = kAecNlpModerate; + aecConfig.skewMode = kAecFalse; + aecConfig.metricsMode = kAecFalse; + aecConfig.delay_logging = kAecFalse; - if (WebRtcAec_set_config(aecpc, aecConfig) == -1) { - aecpc->lastError = AEC_UNSPECIFIED_ERROR; - return -1; - } + if (WebRtcAec_set_config(aecpc, aecConfig) == -1) { + aecpc->lastError = AEC_UNSPECIFIED_ERROR; + return -1; + } #ifdef WEBRTC_AEC_DEBUG_DUMP - if (WebRtc_InitBuffer(aecpc->far_pre_buf_s16) == -1) { - aecpc->lastError = AEC_UNSPECIFIED_ERROR; - return -1; - } - WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN); // Start overlap. + if (WebRtc_InitBuffer(aecpc->far_pre_buf_s16) == -1) { + aecpc->lastError = AEC_UNSPECIFIED_ERROR; + return -1; + } + WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN); // Start overlap. 
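
The "Start overlap" rewind reindented in this hunk is what gives the far-end path its 50%-overlapped FFT partitions: WebRtcAec_BufferFarend() below reads PART_LEN2 samples at a time and then moves the ring-buffer read pointer back PART_LEN samples, so consecutive partitions share half their input. A minimal sketch of that access pattern over a plain array (assuming the AEC core's usual PART_LEN of 64 and PART_LEN2 of 128 from aec_core.h; the helper and callback are illustrative, not from the patch):

    enum { PART_LEN = 64, PART_LEN2 = 128 };

    /* Each callback sees PART_LEN2 samples; successive windows advance by
     * PART_LEN2 - PART_LEN = PART_LEN samples, i.e. 50% overlap. This
     * mirrors WebRtc_ReadBuffer() of PART_LEN2 samples followed by
     * WebRtc_MoveReadPtr(buf, -PART_LEN). */
    static void ConsumeOverlapped(const float* data,
                                  int len,
                                  void (*process)(const float* block)) {
      int pos = 0;
      while (pos + PART_LEN2 <= len) {
        process(data + pos);          /* one FFT partition */
        pos += PART_LEN2 - PART_LEN;  /* net advance after the rewind */
      }
    }

This is also why far_pre_buf is allocated as PART_LEN2 + kResamplerBufferSize floats above: the overlap must still be available after worst-case drift compensation.
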
#endif - return 0; + return 0; } // only buffer L band for farend -int32_t WebRtcAec_BufferFarend(void *aecInst, const int16_t *farend, - int16_t nrOfSamples) -{ - aecpc_t *aecpc = aecInst; - int32_t retVal = 0; - int newNrOfSamples = (int) nrOfSamples; - short newFarend[MAX_RESAMP_LEN]; - const int16_t* farend_ptr = farend; - float tmp_farend[MAX_RESAMP_LEN]; - const float* farend_float = tmp_farend; - float skew; - int i = 0; +int32_t WebRtcAec_BufferFarend(void* aecInst, + const int16_t* farend, + int16_t nrOfSamples) { + aecpc_t* aecpc = aecInst; + int32_t retVal = 0; + int newNrOfSamples = (int)nrOfSamples; + short newFarend[MAX_RESAMP_LEN]; + const int16_t* farend_ptr = farend; + float tmp_farend[MAX_RESAMP_LEN]; + const float* farend_float = tmp_farend; + float skew; + int i = 0; - if (farend == NULL) { - aecpc->lastError = AEC_NULL_POINTER_ERROR; - return -1; - } + if (farend == NULL) { + aecpc->lastError = AEC_NULL_POINTER_ERROR; + return -1; + } - if (aecpc->initFlag != initCheck) { - aecpc->lastError = AEC_UNINITIALIZED_ERROR; - return -1; - } + if (aecpc->initFlag != initCheck) { + aecpc->lastError = AEC_UNINITIALIZED_ERROR; + return -1; + } - // number of samples == 160 for SWB input - if (nrOfSamples != 80 && nrOfSamples != 160) { - aecpc->lastError = AEC_BAD_PARAMETER_ERROR; - return -1; - } + // number of samples == 160 for SWB input + if (nrOfSamples != 80 && nrOfSamples != 160) { + aecpc->lastError = AEC_BAD_PARAMETER_ERROR; + return -1; + } - skew = aecpc->skew; + skew = aecpc->skew; - if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) { - // Resample and get a new number of samples - WebRtcAec_ResampleLinear(aecpc->resampler, farend, nrOfSamples, skew, - newFarend, &newNrOfSamples); - farend_ptr = (const int16_t*) newFarend; - } + if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) { + // Resample and get a new number of samples + WebRtcAec_ResampleLinear(aecpc->resampler, + farend, + nrOfSamples, + skew, + newFarend, + &newNrOfSamples); + farend_ptr = (const int16_t*)newFarend; + } - aecpc->farend_started = 1; - WebRtcAec_SetSystemDelay(aecpc->aec, WebRtcAec_system_delay(aecpc->aec) + - newNrOfSamples); + aecpc->farend_started = 1; + WebRtcAec_SetSystemDelay(aecpc->aec, + WebRtcAec_system_delay(aecpc->aec) + newNrOfSamples); #ifdef WEBRTC_AEC_DEBUG_DUMP - WebRtc_WriteBuffer(aecpc->far_pre_buf_s16, farend_ptr, - (size_t) newNrOfSamples); + WebRtc_WriteBuffer( + aecpc->far_pre_buf_s16, farend_ptr, (size_t)newNrOfSamples); #endif - // Cast to float and write the time-domain data to |far_pre_buf|. - for (i = 0; i < newNrOfSamples; i++) { - tmp_farend[i] = (float) farend_ptr[i]; - } - WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_float, - (size_t) newNrOfSamples); + // Cast to float and write the time-domain data to |far_pre_buf|. + for (i = 0; i < newNrOfSamples; i++) { + tmp_farend[i] = (float)farend_ptr[i]; + } + WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_float, (size_t)newNrOfSamples); - // Transform to frequency domain if we have enough data. - while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) { - // We have enough data to pass to the FFT, hence read PART_LEN2 samples. - WebRtc_ReadBuffer(aecpc->far_pre_buf, (void**) &farend_float, tmp_farend, - PART_LEN2); + // Transform to frequency domain if we have enough data. + while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) { + // We have enough data to pass to the FFT, hence read PART_LEN2 samples. 
+ WebRtc_ReadBuffer( + aecpc->far_pre_buf, (void**)&farend_float, tmp_farend, PART_LEN2); - WebRtcAec_BufferFarendPartition(aecpc->aec, farend_float); + WebRtcAec_BufferFarendPartition(aecpc->aec, farend_float); - // Rewind |far_pre_buf| PART_LEN samples for overlap before continuing. - WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN); + // Rewind |far_pre_buf| PART_LEN samples for overlap before continuing. + WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN); #ifdef WEBRTC_AEC_DEBUG_DUMP - WebRtc_ReadBuffer(aecpc->far_pre_buf_s16, (void**) &farend_ptr, newFarend, - PART_LEN2); - WebRtc_WriteBuffer(WebRtcAec_far_time_buf(aecpc->aec), - &farend_ptr[PART_LEN], 1); - WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN); + WebRtc_ReadBuffer( + aecpc->far_pre_buf_s16, (void**)&farend_ptr, newFarend, PART_LEN2); + WebRtc_WriteBuffer( + WebRtcAec_far_time_buf(aecpc->aec), &farend_ptr[PART_LEN], 1); + WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN); #endif - } + } - return retVal; + return retVal; } -int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend, - const int16_t *nearendH, int16_t *out, int16_t *outH, - int16_t nrOfSamples, int16_t msInSndCardBuf, - int32_t skew) -{ - aecpc_t *aecpc = aecInst; - int32_t retVal = 0; - if (nearend == NULL) { - aecpc->lastError = AEC_NULL_POINTER_ERROR; - return -1; - } +int32_t WebRtcAec_Process(void* aecInst, + const int16_t* nearend, + const int16_t* nearendH, + int16_t* out, + int16_t* outH, + int16_t nrOfSamples, + int16_t msInSndCardBuf, + int32_t skew) { + aecpc_t* aecpc = aecInst; + int32_t retVal = 0; + if (nearend == NULL) { + aecpc->lastError = AEC_NULL_POINTER_ERROR; + return -1; + } - if (out == NULL) { - aecpc->lastError = AEC_NULL_POINTER_ERROR; - return -1; - } + if (out == NULL) { + aecpc->lastError = AEC_NULL_POINTER_ERROR; + return -1; + } - if (aecpc->initFlag != initCheck) { - aecpc->lastError = AEC_UNINITIALIZED_ERROR; - return -1; - } + if (aecpc->initFlag != initCheck) { + aecpc->lastError = AEC_UNINITIALIZED_ERROR; + return -1; + } - // number of samples == 160 for SWB input - if (nrOfSamples != 80 && nrOfSamples != 160) { - aecpc->lastError = AEC_BAD_PARAMETER_ERROR; - return -1; - } + // number of samples == 160 for SWB input + if (nrOfSamples != 80 && nrOfSamples != 160) { + aecpc->lastError = AEC_BAD_PARAMETER_ERROR; + return -1; + } - // Check for valid pointers based on sampling rate - if (aecpc->sampFreq == 32000 && nearendH == NULL) { - aecpc->lastError = AEC_NULL_POINTER_ERROR; - return -1; - } + // Check for valid pointers based on sampling rate + if (aecpc->sampFreq == 32000 && nearendH == NULL) { + aecpc->lastError = AEC_NULL_POINTER_ERROR; + return -1; + } - if (msInSndCardBuf < 0) { - msInSndCardBuf = 0; - aecpc->lastError = AEC_BAD_PARAMETER_WARNING; - retVal = -1; - } - else if (msInSndCardBuf > kMaxTrustedDelayMs) { - // The clamping is now done in ProcessExtended/Normal(). - aecpc->lastError = AEC_BAD_PARAMETER_WARNING; - retVal = -1; - } + if (msInSndCardBuf < 0) { + msInSndCardBuf = 0; + aecpc->lastError = AEC_BAD_PARAMETER_WARNING; + retVal = -1; + } else if (msInSndCardBuf > kMaxTrustedDelayMs) { + // The clamping is now done in ProcessExtended/Normal(). + aecpc->lastError = AEC_BAD_PARAMETER_WARNING; + retVal = -1; + } - // This returns the value of aec->extended_filter_enabled. 
- if (WebRtcAec_delay_correction_enabled(aecpc->aec)) { - ProcessExtended(aecpc, nearend, nearendH, out, outH, nrOfSamples, - msInSndCardBuf, skew); - } else { - if (ProcessNormal(aecpc, nearend, nearendH, out, outH, nrOfSamples, - msInSndCardBuf, skew) != 0) { - retVal = -1; - } + // This returns the value of aec->extended_filter_enabled. + if (WebRtcAec_delay_correction_enabled(aecpc->aec)) { + ProcessExtended( + aecpc, nearend, nearendH, out, outH, nrOfSamples, msInSndCardBuf, skew); + } else { + if (ProcessNormal(aecpc, + nearend, + nearendH, + out, + outH, + nrOfSamples, + msInSndCardBuf, + skew) != 0) { + retVal = -1; } + } #ifdef WEBRTC_AEC_DEBUG_DUMP - { - int16_t far_buf_size_ms = (int16_t)(WebRtcAec_system_delay(aecpc->aec) / - (sampMsNb * aecpc->rate_factor)); - (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile); - (void)fwrite(&aecpc->knownDelay, sizeof(aecpc->knownDelay), 1, - aecpc->delayFile); - } + { + int16_t far_buf_size_ms = (int16_t)(WebRtcAec_system_delay(aecpc->aec) / + (sampMsNb * aecpc->rate_factor)); + (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile); + (void)fwrite( + &aecpc->knownDelay, sizeof(aecpc->knownDelay), 1, aecpc->delayFile); + } #endif - return retVal; + return retVal; } int WebRtcAec_set_config(void* handle, AecConfig config) { @@ -441,8 +461,9 @@ int WebRtcAec_set_config(void* handle, AecConfig config) { } self->skewMode = config.skewMode; - if (config.nlpMode != kAecNlpConservative && config.nlpMode != kAecNlpModerate - && config.nlpMode != kAecNlpAggressive) { + if (config.nlpMode != kAecNlpConservative && + config.nlpMode != kAecNlpModerate && + config.nlpMode != kAecNlpAggressive) { self->lastError = AEC_BAD_PARAMETER_ERROR; return -1; } @@ -457,14 +478,14 @@ int WebRtcAec_set_config(void* handle, AecConfig config) { return -1; } - WebRtcAec_SetConfigCore(self->aec, config.nlpMode, config.metricsMode, - config.delay_logging); + WebRtcAec_SetConfigCore( + self->aec, config.nlpMode, config.metricsMode, config.delay_logging); return 0; } int WebRtcAec_get_echo_status(void* handle, int* status) { aecpc_t* self = (aecpc_t*)handle; - if (status == NULL ) { + if (status == NULL) { self->lastError = AEC_NULL_POINTER_ERROR; return -1; } @@ -487,10 +508,10 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) { Stats erle; Stats a_nlp; - if (handle == NULL ) { + if (handle == NULL) { return -1; } - if (metrics == NULL ) { + if (metrics == NULL) { self->lastError = AEC_NULL_POINTER_ERROR; return -1; } @@ -502,46 +523,46 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) { WebRtcAec_GetEchoStats(self->aec, &erl, &erle, &a_nlp); // ERL - metrics->erl.instant = (int) erl.instant; + metrics->erl.instant = (int)erl.instant; if ((erl.himean > kOffsetLevel) && (erl.average > kOffsetLevel)) { // Use a mix between regular average and upper part average. dtmp = kUpWeight * erl.himean + (1 - kUpWeight) * erl.average; - metrics->erl.average = (int) dtmp; + metrics->erl.average = (int)dtmp; } else { metrics->erl.average = kOffsetLevel; } - metrics->erl.max = (int) erl.max; + metrics->erl.max = (int)erl.max; if (erl.min < (kOffsetLevel * (-1))) { - metrics->erl.min = (int) erl.min; + metrics->erl.min = (int)erl.min; } else { metrics->erl.min = kOffsetLevel; } // ERLE - metrics->erle.instant = (int) erle.instant; + metrics->erle.instant = (int)erle.instant; if ((erle.himean > kOffsetLevel) && (erle.average > kOffsetLevel)) { // Use a mix between regular average and upper part average. 
dtmp = kUpWeight * erle.himean + (1 - kUpWeight) * erle.average; - metrics->erle.average = (int) dtmp; + metrics->erle.average = (int)dtmp; } else { metrics->erle.average = kOffsetLevel; } - metrics->erle.max = (int) erle.max; + metrics->erle.max = (int)erle.max; if (erle.min < (kOffsetLevel * (-1))) { - metrics->erle.min = (int) erle.min; + metrics->erle.min = (int)erle.min; } else { metrics->erle.min = kOffsetLevel; } // RERL - if ((metrics->erl.average > kOffsetLevel) - && (metrics->erle.average > kOffsetLevel)) { + if ((metrics->erl.average > kOffsetLevel) && + (metrics->erle.average > kOffsetLevel)) { stmp = metrics->erl.average + metrics->erle.average; } else { stmp = kOffsetLevel; @@ -554,20 +575,20 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) { metrics->rerl.min = stmp; // A_NLP - metrics->aNlp.instant = (int) a_nlp.instant; + metrics->aNlp.instant = (int)a_nlp.instant; if ((a_nlp.himean > kOffsetLevel) && (a_nlp.average > kOffsetLevel)) { // Use a mix between regular average and upper part average. dtmp = kUpWeight * a_nlp.himean + (1 - kUpWeight) * a_nlp.average; - metrics->aNlp.average = (int) dtmp; + metrics->aNlp.average = (int)dtmp; } else { metrics->aNlp.average = kOffsetLevel; } - metrics->aNlp.max = (int) a_nlp.max; + metrics->aNlp.max = (int)a_nlp.max; if (a_nlp.min < (kOffsetLevel * (-1))) { - metrics->aNlp.min = (int) a_nlp.min; + metrics->aNlp.min = (int)a_nlp.min; } else { metrics->aNlp.min = kOffsetLevel; } @@ -598,22 +619,25 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) { return 0; } -int32_t WebRtcAec_get_error_code(void *aecInst) -{ - aecpc_t *aecpc = aecInst; - return aecpc->lastError; +int32_t WebRtcAec_get_error_code(void* aecInst) { + aecpc_t* aecpc = aecInst; + return aecpc->lastError; } AecCore* WebRtcAec_aec_core(void* handle) { if (!handle) { return NULL; } - return ((aecpc_t*) handle)->aec; + return ((aecpc_t*)handle)->aec; } -static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, - const int16_t *nearendH, int16_t *out, int16_t *outH, - int16_t nrOfSamples, int16_t msInSndCardBuf, +static int ProcessNormal(aecpc_t* aecpc, + const int16_t* nearend, + const int16_t* nearendH, + int16_t* out, + int16_t* outH, + int16_t nrOfSamples, + int16_t msInSndCardBuf, int32_t skew) { int retVal = 0; short i; @@ -623,8 +647,8 @@ static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, const float minSkewEst = -0.5f; const float maxSkewEst = 1.0f; - msInSndCardBuf = msInSndCardBuf > kMaxTrustedDelayMs ? - kMaxTrustedDelayMs : msInSndCardBuf; + msInSndCardBuf = + msInSndCardBuf > kMaxTrustedDelayMs ? kMaxTrustedDelayMs : msInSndCardBuf; // TODO(andrew): we need to investigate if this +10 is really wanted. 
msInSndCardBuf += 10; aecpc->msInSndCardBuf = msInSndCardBuf; @@ -632,27 +656,24 @@ static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, if (aecpc->skewMode == kAecTrue) { if (aecpc->skewFrCtr < 25) { aecpc->skewFrCtr++; - } - else { + } else { retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew); if (retVal == -1) { aecpc->skew = 0; aecpc->lastError = AEC_BAD_PARAMETER_WARNING; } - aecpc->skew /= aecpc->sampFactor*nrOfSamples; + aecpc->skew /= aecpc->sampFactor * nrOfSamples; if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) { aecpc->resample = kAecFalse; - } - else { + } else { aecpc->resample = kAecTrue; } if (aecpc->skew < minSkewEst) { aecpc->skew = minSkewEst; - } - else if (aecpc->skew > maxSkewEst) { + } else if (aecpc->skew > maxSkewEst) { aecpc->skew = maxSkewEst; } @@ -691,11 +712,10 @@ static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, } if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) < - WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) { + WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) { aecpc->sum += aecpc->msInSndCardBuf; aecpc->counter++; - } - else { + } else { aecpc->counter = 0; } @@ -703,9 +723,10 @@ static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, // The far-end buffer size is determined in partitions of // PART_LEN samples. Use 75% of the average value of the system // delay as buffer size to start with. - aecpc->bufSizeStart = WEBRTC_SPL_MIN((3 * aecpc->sum * - aecpc->rate_factor * 8) / (4 * aecpc->counter * PART_LEN), - kMaxBufSizeStart); + aecpc->bufSizeStart = + WEBRTC_SPL_MIN((3 * aecpc->sum * aecpc->rate_factor * 8) / + (4 * aecpc->counter * PART_LEN), + kMaxBufSizeStart); // Buffer size has now been determined. aecpc->checkBuffSize = 0; } @@ -713,8 +734,9 @@ static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) { // For really bad systems, don't disable the echo canceller for // more than 0.5 sec. - aecpc->bufSizeStart = WEBRTC_SPL_MIN((aecpc->msInSndCardBuf * - aecpc->rate_factor * 3) / 40, kMaxBufSizeStart); + aecpc->bufSizeStart = WEBRTC_SPL_MIN( + (aecpc->msInSndCardBuf * aecpc->rate_factor * 3) / 40, + kMaxBufSizeStart); aecpc->checkBuffSize = 0; } } @@ -764,9 +786,14 @@ static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, return retVal; } -static void ProcessExtended(aecpc_t* self, const int16_t* near, - const int16_t* near_high, int16_t* out, int16_t* out_high, - int16_t num_samples, int16_t reported_delay_ms, int32_t skew) { +static void ProcessExtended(aecpc_t* self, + const int16_t* near, + const int16_t* near_high, + int16_t* out, + int16_t* out_high, + int16_t num_samples, + int16_t reported_delay_ms, + int32_t skew) { int i; const int num_frames = num_samples / FRAME_LEN; #if defined(WEBRTC_UNTRUSTED_DELAY) @@ -778,14 +805,16 @@ static void ProcessExtended(aecpc_t* self, const int16_t* near, // Due to the longer filter, we no longer add 10 ms to the reported delay // to reduce chance of non-causality. Instead we apply a minimum here to avoid // issues with the read pointer jumping around needlessly. - reported_delay_ms = reported_delay_ms < kMinTrustedDelayMs ? - kMinTrustedDelayMs : reported_delay_ms; + reported_delay_ms = reported_delay_ms < kMinTrustedDelayMs + ? kMinTrustedDelayMs + : reported_delay_ms; // If the reported delay appears to be bogus, we attempt to recover by using // the measured fixed delay values. 
We use >= here because higher layers // may already clamp to this maximum value, and we would otherwise not // detect it here. - reported_delay_ms = reported_delay_ms >= kMaxTrustedDelayMs ? - kFixedDelayMs : reported_delay_ms; + reported_delay_ms = reported_delay_ms >= kMaxTrustedDelayMs + ? kFixedDelayMs + : reported_delay_ms; #endif self->msInSndCardBuf = reported_delay_ms; @@ -804,10 +833,11 @@ static void ProcessExtended(aecpc_t* self, const int16_t* near, // action on the first frame. In the trusted delay case, we'll take the // current reported delay, unless it's less then our conservative // measurement. - int startup_size_ms = reported_delay_ms < kFixedDelayMs ? - kFixedDelayMs : reported_delay_ms; + int startup_size_ms = + reported_delay_ms < kFixedDelayMs ? kFixedDelayMs : reported_delay_ms; int overhead_elements = (WebRtcAec_system_delay(self->aec) - - startup_size_ms / 2 * self->rate_factor * 8) / PART_LEN; + startup_size_ms / 2 * self->rate_factor * 8) / + PART_LEN; WebRtcAec_MoveFarReadPtr(self->aec, overhead_elements); self->startup_phase = 0; } @@ -822,9 +852,12 @@ static void ProcessExtended(aecpc_t* self, const int16_t* near, WEBRTC_SPL_MAX(0, self->knownDelay + delay_diff_offset); for (i = 0; i < num_frames; ++i) { - WebRtcAec_ProcessFrame(self->aec, &near[FRAME_LEN * i], - &near_high[FRAME_LEN * i], adjusted_known_delay, - &out[FRAME_LEN * i], &out_high[FRAME_LEN * i]); + WebRtcAec_ProcessFrame(self->aec, + &near[FRAME_LEN * i], + &near_high[FRAME_LEN * i], + adjusted_known_delay, + &out[FRAME_LEN * i], + &out_high[FRAME_LEN * i]); } } } @@ -856,8 +889,8 @@ static void EstBufDelayNormal(aecpc_t* aecpc) { // We use -1 to signal an initialized state in the "extended" implementation; // compensate for that. aecpc->filtDelay = aecpc->filtDelay < 0 ? 
0 : aecpc->filtDelay; - aecpc->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.8 * aecpc->filtDelay + - 0.2 * current_delay)); + aecpc->filtDelay = + WEBRTC_SPL_MAX(0, (short)(0.8 * aecpc->filtDelay + 0.2 * current_delay)); delay_difference = aecpc->filtDelay - aecpc->knownDelay; if (delay_difference > 224) { @@ -878,7 +911,7 @@ static void EstBufDelayNormal(aecpc_t* aecpc) { aecpc->lastDelayDiff = delay_difference; if (aecpc->timeForDelayChange > 25) { - aecpc->knownDelay = WEBRTC_SPL_MAX((int) aecpc->filtDelay - 160, 0); + aecpc->knownDelay = WEBRTC_SPL_MAX((int)aecpc->filtDelay - 160, 0); } } @@ -909,8 +942,8 @@ static void EstBufDelayExtended(aecpc_t* self) { if (self->filtDelay == -1) { self->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay); } else { - self->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.95 * self->filtDelay + - 0.05 * current_delay)); + self->filtDelay = WEBRTC_SPL_MAX( + 0, (short)(0.95 * self->filtDelay + 0.05 * current_delay)); } delay_difference = self->filtDelay - self->knownDelay; @@ -932,6 +965,6 @@ static void EstBufDelayExtended(aecpc_t* self) { self->lastDelayDiff = delay_difference; if (self->timeForDelayChange > 25) { - self->knownDelay = WEBRTC_SPL_MAX((int) self->filtDelay - 256, 0); + self->knownDelay = WEBRTC_SPL_MAX((int)self->filtDelay - 256, 0); } } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/include/echo_cancellation.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/include/echo_cancellation.h index c362a6766276..4c852cf64bfe 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/include/echo_cancellation.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/include/echo_cancellation.h @@ -14,32 +14,32 @@ #include "webrtc/typedefs.h" // Errors -#define AEC_UNSPECIFIED_ERROR 12000 -#define AEC_UNSUPPORTED_FUNCTION_ERROR 12001 -#define AEC_UNINITIALIZED_ERROR 12002 -#define AEC_NULL_POINTER_ERROR 12003 -#define AEC_BAD_PARAMETER_ERROR 12004 +#define AEC_UNSPECIFIED_ERROR 12000 +#define AEC_UNSUPPORTED_FUNCTION_ERROR 12001 +#define AEC_UNINITIALIZED_ERROR 12002 +#define AEC_NULL_POINTER_ERROR 12003 +#define AEC_BAD_PARAMETER_ERROR 12004 // Warnings -#define AEC_BAD_PARAMETER_WARNING 12050 +#define AEC_BAD_PARAMETER_WARNING 12050 enum { - kAecNlpConservative = 0, - kAecNlpModerate, - kAecNlpAggressive + kAecNlpConservative = 0, + kAecNlpModerate, + kAecNlpAggressive }; enum { - kAecFalse = 0, - kAecTrue + kAecFalse = 0, + kAecTrue }; typedef struct { - int16_t nlpMode; // default kAecNlpModerate - int16_t skewMode; // default kAecFalse - int16_t metricsMode; // default kAecFalse - int delay_logging; // default kAecFalse - //float realSkew; + int16_t nlpMode; // default kAecNlpModerate + int16_t skewMode; // default kAecFalse + int16_t metricsMode; // default kAecFalse + int delay_logging; // default kAecFalse + // float realSkew; } AecConfig; typedef struct { @@ -50,10 +50,10 @@ typedef struct { } AecLevel; typedef struct { - AecLevel rerl; - AecLevel erl; - AecLevel erle; - AecLevel aNlp; + AecLevel rerl; + AecLevel erl; + AecLevel erle; + AecLevel aNlp; } AecMetrics; struct AecCore; @@ -76,7 +76,7 @@ extern "C" { * int32_t return 0: OK * -1: error */ -int32_t WebRtcAec_Create(void **aecInst); +int32_t WebRtcAec_Create(void** aecInst); /* * This function releases the memory allocated by WebRtcAec_Create(). 
@@ -90,7 +90,7 @@ int32_t WebRtcAec_Create(void **aecInst); * int32_t return 0: OK * -1: error */ -int32_t WebRtcAec_Free(void *aecInst); +int32_t WebRtcAec_Free(void* aecInst); /* * Initializes an AEC instance. @@ -106,7 +106,7 @@ int32_t WebRtcAec_Free(void *aecInst); * int32_t return 0: OK * -1: error */ -int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq); +int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq); /* * Inserts an 80 or 160 sample block of data into the farend buffer. @@ -123,8 +123,8 @@ int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq); * int32_t return 0: OK * -1: error */ -int32_t WebRtcAec_BufferFarend(void *aecInst, - const int16_t *farend, +int32_t WebRtcAec_BufferFarend(void* aecInst, + const int16_t* farend, int16_t nrOfSamples); /* @@ -153,11 +153,11 @@ int32_t WebRtcAec_BufferFarend(void *aecInst, * int32_t return 0: OK * -1: error */ -int32_t WebRtcAec_Process(void *aecInst, - const int16_t *nearend, - const int16_t *nearendH, - int16_t *out, - int16_t *outH, +int32_t WebRtcAec_Process(void* aecInst, + const int16_t* nearend, + const int16_t* nearendH, + int16_t* out, + int16_t* outH, int16_t nrOfSamples, int16_t msInSndCardBuf, int32_t skew); @@ -238,7 +238,7 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std); * ------------------------------------------------------------------- * int32_t return 11000-11100: error code */ -int32_t WebRtcAec_get_error_code(void *aecInst); +int32_t WebRtcAec_get_error_code(void* aecInst); // Returns a pointer to the low level AEC handle. // diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc index db37f0e83a8a..a19030ae3508 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc @@ -52,9 +52,7 @@ class SystemDelayTest : public ::testing::Test { }; SystemDelayTest::SystemDelayTest() - : handle_(NULL), - self_(NULL), - samples_per_frame_(0) { + : handle_(NULL), self_(NULL), samples_per_frame_(0) { // Dummy input data are set with more or less arbitrary non-zero values. memset(far_, 1, sizeof(far_)); memset(near_, 2, sizeof(near_)); @@ -74,7 +72,7 @@ void SystemDelayTest::TearDown() { // In SWB mode nothing is added to the buffer handling with respect to // functionality compared to WB. We therefore only verify behavior in NB and WB. -static const int kSampleRateHz[] = { 8000, 16000 }; +static const int kSampleRateHz[] = {8000, 16000}; static const size_t kNumSampleRates = sizeof(kSampleRateHz) / sizeof(*kSampleRateHz); @@ -100,8 +98,15 @@ void SystemDelayTest::Init(int sample_rate_hz) { void SystemDelayTest::RenderAndCapture(int device_buffer_ms) { EXPECT_EQ(0, WebRtcAec_BufferFarend(handle_, far_, samples_per_frame_)); - EXPECT_EQ(0, WebRtcAec_Process(handle_, near_, NULL, out_, NULL, - samples_per_frame_, device_buffer_ms, 0)); + EXPECT_EQ(0, + WebRtcAec_Process(handle_, + near_, + NULL, + out_, + NULL, + samples_per_frame_, + device_buffer_ms, + 0)); } int SystemDelayTest::BufferFillUp() { @@ -254,8 +259,15 @@ TEST_F(SystemDelayTest, CorrectDelayAfterStableBufferBuildUp) { // can make that assumption since we have a separate stability test. 
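  // (Each WebRtcAec_Process() call below consumes one frame --
  // samples_per_frame_ is 80 or 160 samples, i.e. 10 ms at the 8 or
  // 16 kHz rates under test -- so stepping process_time_ms by 10 up to
  // kStableConvergenceMs feeds exactly enough frames for the startup
  // buffer estimate to settle before checkBuffSize is inspected.)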
int process_time_ms = 0; for (; process_time_ms < kStableConvergenceMs; process_time_ms += 10) { - EXPECT_EQ(0, WebRtcAec_Process(handle_, near_, NULL, out_, NULL, - samples_per_frame_, kDeviceBufMs, 0)); + EXPECT_EQ(0, + WebRtcAec_Process(handle_, + near_, + NULL, + out_, + NULL, + samples_per_frame_, + kDeviceBufMs, + 0)); } // Verify that a buffer size has been established. EXPECT_EQ(0, self_->checkBuffSize); @@ -301,8 +313,15 @@ TEST_F(SystemDelayTest, CorrectDelayWhenBufferUnderrun) { // |kStableConvergenceMs| in the buffer. Keep on calling Process() until // we run out of data and verify that the system delay is non-negative. for (int j = 0; j <= kStableConvergenceMs; j += 10) { - EXPECT_EQ(0, WebRtcAec_Process(handle_, near_, NULL, out_, NULL, - samples_per_frame_, kDeviceBufMs, 0)); + EXPECT_EQ(0, + WebRtcAec_Process(handle_, + near_, + NULL, + out_, + NULL, + samples_per_frame_, + kDeviceBufMs, + 0)); EXPECT_LE(0, WebRtcAec_system_delay(self_->aec)); } } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.c index db355156231d..0f34874612d8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.c @@ -18,7 +18,7 @@ #include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h" #include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h" #include "webrtc/modules/audio_processing/utility/ring_buffer.h" -#include "webrtc/system_wrappers/interface/compile_assert.h" +#include "webrtc/system_wrappers/interface/compile_assert_c.h" #include "webrtc/system_wrappers/interface/cpu_features_wrapper.h" #include "webrtc/typedefs.h" @@ -27,63 +27,7 @@ FILE *dfile; FILE *testfile; #endif -// Square root of Hanning window in Q14. -#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON) -// Table is defined in an ARM assembly file. 
-extern const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END; -#else -static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = { - 0, 399, 798, 1196, 1594, 1990, 2386, 2780, 3172, - 3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224, - 6591, 6954, 7313, 7668, 8019, 8364, 8705, 9040, - 9370, 9695, 10013, 10326, 10633, 10933, 11227, 11514, - 11795, 12068, 12335, 12594, 12845, 13089, 13325, 13553, - 13773, 13985, 14189, 14384, 14571, 14749, 14918, 15079, - 15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034, - 16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384 -}; -#endif - -//Q15 alpha = 0.99439986968132 const Factor for magnitude approximation -static const uint16_t kAlpha1 = 32584; -//Q15 beta = 0.12967166976970 const Factor for magnitude approximation -static const uint16_t kBeta1 = 4249; -//Q15 alpha = 0.94234827210087 const Factor for magnitude approximation -static const uint16_t kAlpha2 = 30879; -//Q15 beta = 0.33787806009150 const Factor for magnitude approximation -static const uint16_t kBeta2 = 11072; -//Q15 alpha = 0.82247698684306 const Factor for magnitude approximation -static const uint16_t kAlpha3 = 26951; -//Q15 beta = 0.57762063060713 const Factor for magnitude approximation -static const uint16_t kBeta3 = 18927; - -// Initialization table for echo channel in 8 kHz -static const int16_t kChannelStored8kHz[PART_LEN1] = { - 2040, 1815, 1590, 1498, 1405, 1395, 1385, 1418, - 1451, 1506, 1562, 1644, 1726, 1804, 1882, 1918, - 1953, 1982, 2010, 2025, 2040, 2034, 2027, 2021, - 2014, 1997, 1980, 1925, 1869, 1800, 1732, 1683, - 1635, 1604, 1572, 1545, 1517, 1481, 1444, 1405, - 1367, 1331, 1294, 1270, 1245, 1239, 1233, 1247, - 1260, 1282, 1303, 1338, 1373, 1407, 1441, 1470, - 1499, 1524, 1549, 1565, 1582, 1601, 1621, 1649, - 1676 -}; - -// Initialization table for echo channel in 16 kHz -static const int16_t kChannelStored16kHz[PART_LEN1] = { - 2040, 1590, 1405, 1385, 1451, 1562, 1726, 1882, - 1953, 2010, 2040, 2027, 2014, 1980, 1869, 1732, - 1635, 1572, 1517, 1444, 1367, 1294, 1245, 1233, - 1260, 1303, 1373, 1441, 1499, 1549, 1582, 1621, - 1676, 1741, 1802, 1861, 1921, 1983, 2040, 2102, - 2170, 2265, 2375, 2515, 2651, 2781, 2922, 3075, - 3253, 3471, 3738, 3976, 4151, 4258, 4308, 4288, - 4270, 4253, 4237, 4179, 4086, 3947, 3757, 3484, - 3153 -}; - -static const int16_t kCosTable[] = { +const int16_t WebRtcAecm_kCosTable[] = { 8192, 8190, 8187, 8180, 8172, 8160, 8147, 8130, 8112, 8091, 8067, 8041, 8012, 7982, 7948, 7912, 7874, 7834, 7791, 7745, 7697, 7647, 7595, 7540, 7483, 7424, 7362, @@ -126,7 +70,7 @@ static const int16_t kCosTable[] = { 8091, 8112, 8130, 8147, 8160, 8172, 8180, 8187, 8190 }; -static const int16_t kSinTable[] = { +const int16_t WebRtcAecm_kSinTable[] = { 0, 142, 285, 428, 571, 713, 856, 998, 1140, 1281, 1422, 1563, 1703, 1842, 1981, 2120, 2258, 2395, 2531, 2667, 2801, 2935, 3068, 3200, @@ -174,15 +118,31 @@ static const int16_t kSinTable[] = { -1140, -998, -856, -713, -571, -428, -285, -142 }; -static const int16_t kNoiseEstQDomain = 15; -static const int16_t kNoiseEstIncCount = 5; +// Initialization table for echo channel in 8 kHz +static const int16_t kChannelStored8kHz[PART_LEN1] = { + 2040, 1815, 1590, 1498, 1405, 1395, 1385, 1418, + 1451, 1506, 1562, 1644, 1726, 1804, 1882, 1918, + 1953, 1982, 2010, 2025, 2040, 2034, 2027, 2021, + 2014, 1997, 1980, 1925, 1869, 1800, 1732, 1683, + 1635, 1604, 1572, 1545, 1517, 1481, 1444, 1405, + 1367, 1331, 1294, 1270, 1245, 1239, 1233, 1247, + 1260, 1282, 1303, 1338, 1373, 1407, 1441, 1470, + 1499, 
1524, 1549, 1565, 1582, 1601, 1621, 1649, + 1676 +}; -static void ComfortNoise(AecmCore_t* aecm, - const uint16_t* dfa, - complex16_t* out, - const int16_t* lambda); - -static int16_t CalcSuppressionGain(AecmCore_t * const aecm); +// Initialization table for echo channel in 16 kHz +static const int16_t kChannelStored16kHz[PART_LEN1] = { + 2040, 1590, 1405, 1385, 1451, 1562, 1726, 1882, + 1953, 2010, 2040, 2027, 2014, 1980, 1869, 1732, + 1635, 1572, 1517, 1444, 1367, 1294, 1245, 1233, + 1260, 1303, 1373, 1441, 1499, 1549, 1582, 1621, + 1676, 1741, 1802, 1861, 1921, 1983, 2040, 2102, + 2170, 2265, 2375, 2515, 2651, 2781, 2922, 3075, + 3253, 3471, 3738, 3976, 4151, 4258, 4308, 4288, + 4270, 4253, 4237, 4179, 4086, 3947, 3757, 3484, + 3153 +}; // Moves the pointer to the next entry and inserts |far_spectrum| and // corresponding Q-domain in its buffer. @@ -192,9 +152,9 @@ static int16_t CalcSuppressionGain(AecmCore_t * const aecm); // - far_spectrum : Pointer to the far end spectrum // - far_q : Q-domain of far end spectrum // -static void UpdateFarHistory(AecmCore_t* self, - uint16_t* far_spectrum, - int far_q) { +void WebRtcAecm_UpdateFarHistory(AecmCore_t* self, + uint16_t* far_spectrum, + int far_q) { // Get new buffer position self->far_history_pos++; if (self->far_history_pos >= MAX_DELAY) { @@ -225,7 +185,9 @@ static void UpdateFarHistory(AecmCore_t* self, // - far_spectrum : Pointer to the aligned far end spectrum // NULL - Error // -static const uint16_t* AlignedFarend(AecmCore_t* self, int* far_q, int delay) { +const uint16_t* WebRtcAecm_AlignedFarend(AecmCore_t* self, + int* far_q, + int delay) { int buffer_position = 0; assert(self != NULL); buffer_position = self->far_history_pos - delay; @@ -304,6 +266,9 @@ int WebRtcAecm_CreateCore(AecmCore_t **aecmInst) aecm = NULL; return -1; } + // TODO(bjornv): Explicitly disable robust delay validation until no + // performance regression has been established. Then remove the line. + WebRtc_enable_robust_validation(aecm->delay_estimator, 0); aecm->real_fft = WebRtcSpl_CreateRealFFT(PART_LEN_SHIFT); if (aecm->real_fft == NULL) { @@ -349,85 +314,6 @@ void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm, const int16_t* echo_path) aecm->mseChannelCount = 0; } -static void WindowAndFFT(AecmCore_t* aecm, - int16_t* fft, - const int16_t* time_signal, - complex16_t* freq_signal, - int time_signal_scaling) { - int i = 0; - - // FFT of signal - for (i = 0; i < PART_LEN; i++) { - // Window time domain signal and insert into real part of - // transformation array |fft| - fft[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( - (time_signal[i] << time_signal_scaling), - WebRtcAecm_kSqrtHanning[i], - 14); - fft[PART_LEN + i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( - (time_signal[i + PART_LEN] << time_signal_scaling), - WebRtcAecm_kSqrtHanning[PART_LEN - i], - 14); - } - - // Do forward FFT, then take only the first PART_LEN complex samples, - // and change signs of the imaginary parts. - WebRtcSpl_RealForwardFFT(aecm->real_fft, fft, (int16_t*)freq_signal); - for (i = 0; i < PART_LEN; i++) { - freq_signal[i].imag = -freq_signal[i].imag; - } -} - -static void InverseFFTAndWindow(AecmCore_t* aecm, - int16_t* fft, - complex16_t* efw, - int16_t* output, - const int16_t* nearendClean) -{ - int i, j, outCFFT; - int32_t tmp32no1; - // Reuse |efw| for the inverse FFT output after transferring - // the contents to |fft|. 
- int16_t* ifft_out = (int16_t*)efw; - - // Synthesis - for (i = 1, j = 2; i < PART_LEN; i += 1, j += 2) { - fft[j] = efw[i].real; - fft[j + 1] = -efw[i].imag; - } - fft[0] = efw[0].real; - fft[1] = -efw[0].imag; - - fft[PART_LEN2] = efw[PART_LEN].real; - fft[PART_LEN2 + 1] = -efw[PART_LEN].imag; - - // Inverse FFT. Keep outCFFT to scale the samples in the next block. - outCFFT = WebRtcSpl_RealInverseFFT(aecm->real_fft, fft, ifft_out); - for (i = 0; i < PART_LEN; i++) { - ifft_out[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - ifft_out[i], WebRtcAecm_kSqrtHanning[i], 14); - tmp32no1 = WEBRTC_SPL_SHIFT_W32((int32_t)ifft_out[i], - outCFFT - aecm->dfaCleanQDomain); - output[i] = (int16_t)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, - tmp32no1 + aecm->outBuf[i], WEBRTC_SPL_WORD16_MIN); - - tmp32no1 = WEBRTC_SPL_MUL_16_16_RSFT(ifft_out[PART_LEN + i], - WebRtcAecm_kSqrtHanning[PART_LEN - i], 14); - tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, - outCFFT - aecm->dfaCleanQDomain); - aecm->outBuf[i] = (int16_t)WEBRTC_SPL_SAT( - WEBRTC_SPL_WORD16_MAX, tmp32no1, WEBRTC_SPL_WORD16_MIN); - } - - // Copy the current block to the old position (aecm->outBuf is shifted elsewhere) - memcpy(aecm->xBuf, aecm->xBuf + PART_LEN, sizeof(int16_t) * PART_LEN); - memcpy(aecm->dBufNoisy, aecm->dBufNoisy + PART_LEN, sizeof(int16_t) * PART_LEN); - if (nearendClean != NULL) - { - memcpy(aecm->dBufClean, aecm->dBufClean + PART_LEN, sizeof(int16_t) * PART_LEN); - } -} - static void CalcLinearEnergiesC(AecmCore_t* aecm, const uint16_t* far_spectrum, int32_t* echo_est, @@ -507,6 +393,18 @@ static void WebRtcAecm_InitNeon(void) } #endif +// Initialize function pointers for MIPS platform. +#if defined(MIPS32_LE) +static void WebRtcAecm_InitMips(void) +{ +#if defined(MIPS_DSP_R1_LE) + WebRtcAecm_StoreAdaptiveChannel = WebRtcAecm_StoreAdaptiveChannel_mips; + WebRtcAecm_ResetAdaptiveChannel = WebRtcAecm_ResetAdaptiveChannel_mips; +#endif + WebRtcAecm_CalcLinearEnergies = WebRtcAecm_CalcLinearEnergies_mips; +} +#endif + // WebRtcAecm_InitCore(...) // // This function initializes the AECM instant created with WebRtcAecm_CreateCore(...) @@ -644,6 +542,9 @@ int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq) WebRtcAecm_InitNeon(); #endif +#if defined(MIPS32_LE) + WebRtcAecm_InitMips(); +#endif return 0; } @@ -1263,7 +1164,7 @@ void WebRtcAecm_UpdateChannel(AecmCore_t * aecm, // level (Q14). // // -static int16_t CalcSuppressionGain(AecmCore_t * const aecm) +int16_t WebRtcAecm_CalcSuppressionGain(AecmCore_t * const aecm) { int32_t tmp32no1; @@ -1332,639 +1233,6 @@ static int16_t CalcSuppressionGain(AecmCore_t * const aecm) return aecm->supGain; } -// Transforms a time domain signal into the frequency domain, outputting the -// complex valued signal, absolute value and sum of absolute values. 
-// -// time_signal [in] Pointer to time domain signal -// freq_signal_real [out] Pointer to real part of frequency domain array -// freq_signal_imag [out] Pointer to imaginary part of frequency domain -// array -// freq_signal_abs [out] Pointer to absolute value of frequency domain -// array -// freq_signal_sum_abs [out] Pointer to the sum of all absolute values in -// the frequency domain array -// return value The Q-domain of current frequency values -// -static int TimeToFrequencyDomain(AecmCore_t* aecm, - const int16_t* time_signal, - complex16_t* freq_signal, - uint16_t* freq_signal_abs, - uint32_t* freq_signal_sum_abs) -{ - int i = 0; - int time_signal_scaling = 0; - - int32_t tmp32no1 = 0; - int32_t tmp32no2 = 0; - - // In fft_buf, +16 for 32-byte alignment. - int16_t fft_buf[PART_LEN4 + 16]; - int16_t *fft = (int16_t *) (((uintptr_t) fft_buf + 31) & ~31); - - int16_t tmp16no1; -#ifndef WEBRTC_ARCH_ARM_V7 - int16_t tmp16no2; -#endif -#ifdef AECM_WITH_ABS_APPROX - int16_t max_value = 0; - int16_t min_value = 0; - uint16_t alpha = 0; - uint16_t beta = 0; -#endif - -#ifdef AECM_DYNAMIC_Q - tmp16no1 = WebRtcSpl_MaxAbsValueW16(time_signal, PART_LEN2); - time_signal_scaling = WebRtcSpl_NormW16(tmp16no1); -#endif - - WindowAndFFT(aecm, fft, time_signal, freq_signal, time_signal_scaling); - - // Extract imaginary and real part, calculate the magnitude for all frequency bins - freq_signal[0].imag = 0; - freq_signal[PART_LEN].imag = 0; - freq_signal_abs[0] = (uint16_t)WEBRTC_SPL_ABS_W16( - freq_signal[0].real); - freq_signal_abs[PART_LEN] = (uint16_t)WEBRTC_SPL_ABS_W16( - freq_signal[PART_LEN].real); - (*freq_signal_sum_abs) = (uint32_t)(freq_signal_abs[0]) + - (uint32_t)(freq_signal_abs[PART_LEN]); - - for (i = 1; i < PART_LEN; i++) - { - if (freq_signal[i].real == 0) - { - freq_signal_abs[i] = (uint16_t)WEBRTC_SPL_ABS_W16( - freq_signal[i].imag); - } - else if (freq_signal[i].imag == 0) - { - freq_signal_abs[i] = (uint16_t)WEBRTC_SPL_ABS_W16( - freq_signal[i].real); - } - else - { - // Approximation for magnitude of complex fft output - // magn = sqrt(real^2 + imag^2) - // magn ~= alpha * max(|imag|,|real|) + beta * min(|imag|,|real|) - // - // The parameters alpha and beta are stored in Q15 - -#ifdef AECM_WITH_ABS_APPROX - tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real); - tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag); - - if(tmp16no1 > tmp16no2) - { - max_value = tmp16no1; - min_value = tmp16no2; - } else - { - max_value = tmp16no2; - min_value = tmp16no1; - } - - // Magnitude in Q(-6) - if ((max_value >> 2) > min_value) - { - alpha = kAlpha1; - beta = kBeta1; - } else if ((max_value >> 1) > min_value) - { - alpha = kAlpha2; - beta = kBeta2; - } else - { - alpha = kAlpha3; - beta = kBeta3; - } - tmp16no1 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(max_value, - alpha, - 15); - tmp16no2 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(min_value, - beta, - 15); - freq_signal_abs[i] = (uint16_t)tmp16no1 + - (uint16_t)tmp16no2; -#else -#ifdef WEBRTC_ARCH_ARM_V7 - __asm __volatile( - "smulbb %[tmp32no1], %[real], %[real]\n\t" - "smlabb %[tmp32no2], %[imag], %[imag], %[tmp32no1]\n\t" - :[tmp32no1]"+r"(tmp32no1), - [tmp32no2]"=r"(tmp32no2) - :[real]"r"(freq_signal[i].real), - [imag]"r"(freq_signal[i].imag) - ); -#else - tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real); - tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag); - tmp32no1 = WEBRTC_SPL_MUL_16_16(tmp16no1, tmp16no1); - tmp32no2 = WEBRTC_SPL_MUL_16_16(tmp16no2, tmp16no2); - tmp32no2 = WEBRTC_SPL_ADD_SAT_W32(tmp32no1, tmp32no2); -#endif // 
WEBRTC_ARCH_ARM_V7 - tmp32no1 = WebRtcSpl_SqrtFloor(tmp32no2); - - freq_signal_abs[i] = (uint16_t)tmp32no1; -#endif // AECM_WITH_ABS_APPROX - } - (*freq_signal_sum_abs) += (uint32_t)freq_signal_abs[i]; - } - - return time_signal_scaling; -} - -int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, - const int16_t * farend, - const int16_t * nearendNoisy, - const int16_t * nearendClean, - int16_t * output) -{ - int i; - - uint32_t xfaSum; - uint32_t dfaNoisySum; - uint32_t dfaCleanSum; - uint32_t echoEst32Gained; - uint32_t tmpU32; - - int32_t tmp32no1; - - uint16_t xfa[PART_LEN1]; - uint16_t dfaNoisy[PART_LEN1]; - uint16_t dfaClean[PART_LEN1]; - uint16_t* ptrDfaClean = dfaClean; - const uint16_t* far_spectrum_ptr = NULL; - - // 32 byte aligned buffers (with +8 or +16). - // TODO (kma): define fft with complex16_t. - int16_t fft_buf[PART_LEN4 + 2 + 16]; // +2 to make a loop safe. - int32_t echoEst32_buf[PART_LEN1 + 8]; - int32_t dfw_buf[PART_LEN2 + 8]; - int32_t efw_buf[PART_LEN2 + 8]; - - int16_t* fft = (int16_t*) (((uintptr_t) fft_buf + 31) & ~ 31); - int32_t* echoEst32 = (int32_t*) (((uintptr_t) echoEst32_buf + 31) & ~ 31); - complex16_t* dfw = (complex16_t*) (((uintptr_t) dfw_buf + 31) & ~ 31); - complex16_t* efw = (complex16_t*) (((uintptr_t) efw_buf + 31) & ~ 31); - - int16_t hnl[PART_LEN1]; - int16_t numPosCoef = 0; - int16_t nlpGain = ONE_Q14; - int delay; - int16_t tmp16no1; - int16_t tmp16no2; - int16_t mu; - int16_t supGain; - int16_t zeros32, zeros16; - int16_t zerosDBufNoisy, zerosDBufClean, zerosXBuf; - int far_q; - int16_t resolutionDiff, qDomainDiff; - - const int kMinPrefBand = 4; - const int kMaxPrefBand = 24; - int32_t avgHnl32 = 0; - - // Determine startup state. There are three states: - // (0) the first CONV_LEN blocks - // (1) another CONV_LEN blocks - // (2) the rest - - if (aecm->startupState < 2) - { - aecm->startupState = (aecm->totCount >= CONV_LEN) + (aecm->totCount >= CONV_LEN2); - } - // END: Determine startup state - - // Buffer near and far end signals - memcpy(aecm->xBuf + PART_LEN, farend, sizeof(int16_t) * PART_LEN); - memcpy(aecm->dBufNoisy + PART_LEN, nearendNoisy, sizeof(int16_t) * PART_LEN); - if (nearendClean != NULL) - { - memcpy(aecm->dBufClean + PART_LEN, nearendClean, sizeof(int16_t) * PART_LEN); - } - - // Transform far end signal from time domain to frequency domain. - far_q = TimeToFrequencyDomain(aecm, - aecm->xBuf, - dfw, - xfa, - &xfaSum); - - // Transform noisy near end signal from time domain to frequency domain. - zerosDBufNoisy = TimeToFrequencyDomain(aecm, - aecm->dBufNoisy, - dfw, - dfaNoisy, - &dfaNoisySum); - aecm->dfaNoisyQDomainOld = aecm->dfaNoisyQDomain; - aecm->dfaNoisyQDomain = (int16_t)zerosDBufNoisy; - - - if (nearendClean == NULL) - { - ptrDfaClean = dfaNoisy; - aecm->dfaCleanQDomainOld = aecm->dfaNoisyQDomainOld; - aecm->dfaCleanQDomain = aecm->dfaNoisyQDomain; - dfaCleanSum = dfaNoisySum; - } else - { - // Transform clean near end signal from time domain to frequency domain. 
- zerosDBufClean = TimeToFrequencyDomain(aecm, - aecm->dBufClean, - dfw, - dfaClean, - &dfaCleanSum); - aecm->dfaCleanQDomainOld = aecm->dfaCleanQDomain; - aecm->dfaCleanQDomain = (int16_t)zerosDBufClean; - } - - // Get the delay - // Save far-end history and estimate delay - UpdateFarHistory(aecm, xfa, far_q); - if (WebRtc_AddFarSpectrumFix(aecm->delay_estimator_farend, xfa, PART_LEN1, - far_q) == -1) { - return -1; - } - delay = WebRtc_DelayEstimatorProcessFix(aecm->delay_estimator, - dfaNoisy, - PART_LEN1, - zerosDBufNoisy); - if (delay == -1) - { - return -1; - } - else if (delay == -2) - { - // If the delay is unknown, we assume zero. - // NOTE: this will have to be adjusted if we ever add lookahead. - delay = 0; - } - - if (aecm->fixedDelay >= 0) - { - // Use fixed delay - delay = aecm->fixedDelay; - } - - // Get aligned far end spectrum - far_spectrum_ptr = AlignedFarend(aecm, &far_q, delay); - zerosXBuf = (int16_t) far_q; - if (far_spectrum_ptr == NULL) - { - return -1; - } - - // Calculate log(energy) and update energy threshold levels - WebRtcAecm_CalcEnergies(aecm, - far_spectrum_ptr, - zerosXBuf, - dfaNoisySum, - echoEst32); - - // Calculate stepsize - mu = WebRtcAecm_CalcStepSize(aecm); - - // Update counters - aecm->totCount++; - - // This is the channel estimation algorithm. - // It is base on NLMS but has a variable step length, which was calculated above. - WebRtcAecm_UpdateChannel(aecm, far_spectrum_ptr, zerosXBuf, dfaNoisy, mu, echoEst32); - supGain = CalcSuppressionGain(aecm); - - - // Calculate Wiener filter hnl[] - for (i = 0; i < PART_LEN1; i++) - { - // Far end signal through channel estimate in Q8 - // How much can we shift right to preserve resolution - tmp32no1 = echoEst32[i] - aecm->echoFilt[i]; - aecm->echoFilt[i] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32no1, 50), 8); - - zeros32 = WebRtcSpl_NormW32(aecm->echoFilt[i]) + 1; - zeros16 = WebRtcSpl_NormW16(supGain) + 1; - if (zeros32 + zeros16 > 16) - { - // Multiplication is safe - // Result in Q(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN+aecm->xfaQDomainBuf[diff]) - echoEst32Gained = WEBRTC_SPL_UMUL_32_16((uint32_t)aecm->echoFilt[i], - (uint16_t)supGain); - resolutionDiff = 14 - RESOLUTION_CHANNEL16 - RESOLUTION_SUPGAIN; - resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf); - } else - { - tmp16no1 = 17 - zeros32 - zeros16; - resolutionDiff = 14 + tmp16no1 - RESOLUTION_CHANNEL16 - RESOLUTION_SUPGAIN; - resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf); - if (zeros32 > tmp16no1) - { - echoEst32Gained = WEBRTC_SPL_UMUL_32_16((uint32_t)aecm->echoFilt[i], - (uint16_t)WEBRTC_SPL_RSHIFT_W16(supGain, - tmp16no1)); // Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN-16) - } else - { - // Result in Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN-16) - echoEst32Gained = WEBRTC_SPL_UMUL_32_16( - (uint32_t)WEBRTC_SPL_RSHIFT_W32(aecm->echoFilt[i], tmp16no1), - (uint16_t)supGain); - } - } - - zeros16 = WebRtcSpl_NormW16(aecm->nearFilt[i]); - if ((zeros16 < (aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld)) - & (aecm->nearFilt[i])) - { - tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], zeros16); - qDomainDiff = zeros16 - aecm->dfaCleanQDomain + aecm->dfaCleanQDomainOld; - } else - { - tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], - aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld); - qDomainDiff = 0; - } - tmp16no2 = WEBRTC_SPL_SHIFT_W16(ptrDfaClean[i], qDomainDiff); - tmp32no1 = (int32_t)(tmp16no2 - tmp16no1); - tmp16no2 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 4); - tmp16no2 += tmp16no1; - zeros16 = 
WebRtcSpl_NormW16(tmp16no2); - if ((tmp16no2) & (-qDomainDiff > zeros16)) - { - aecm->nearFilt[i] = WEBRTC_SPL_WORD16_MAX; - } else - { - aecm->nearFilt[i] = WEBRTC_SPL_SHIFT_W16(tmp16no2, -qDomainDiff); - } - - // Wiener filter coefficients, resulting hnl in Q14 - if (echoEst32Gained == 0) - { - hnl[i] = ONE_Q14; - } else if (aecm->nearFilt[i] == 0) - { - hnl[i] = 0; - } else - { - // Multiply the suppression gain - // Rounding - echoEst32Gained += (uint32_t)(aecm->nearFilt[i] >> 1); - tmpU32 = WebRtcSpl_DivU32U16(echoEst32Gained, (uint16_t)aecm->nearFilt[i]); - - // Current resolution is - // Q-(RESOLUTION_CHANNEL + RESOLUTION_SUPGAIN - max(0, 17 - zeros16 - zeros32)) - // Make sure we are in Q14 - tmp32no1 = (int32_t)WEBRTC_SPL_SHIFT_W32(tmpU32, resolutionDiff); - if (tmp32no1 > ONE_Q14) - { - hnl[i] = 0; - } else if (tmp32no1 < 0) - { - hnl[i] = ONE_Q14; - } else - { - // 1-echoEst/dfa - hnl[i] = ONE_Q14 - (int16_t)tmp32no1; - if (hnl[i] < 0) - { - hnl[i] = 0; - } - } - } - if (hnl[i]) - { - numPosCoef++; - } - } - // Only in wideband. Prevent the gain in upper band from being larger than - // in lower band. - if (aecm->mult == 2) - { - // TODO(bjornv): Investigate if the scaling of hnl[i] below can cause - // speech distortion in double-talk. - for (i = 0; i < PART_LEN1; i++) - { - hnl[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(hnl[i], hnl[i], 14); - } - - for (i = kMinPrefBand; i <= kMaxPrefBand; i++) - { - avgHnl32 += (int32_t)hnl[i]; - } - assert(kMaxPrefBand - kMinPrefBand + 1 > 0); - avgHnl32 /= (kMaxPrefBand - kMinPrefBand + 1); - - for (i = kMaxPrefBand; i < PART_LEN1; i++) - { - if (hnl[i] > (int16_t)avgHnl32) - { - hnl[i] = (int16_t)avgHnl32; - } - } - } - - // Calculate NLP gain, result is in Q14 - if (aecm->nlpFlag) - { - for (i = 0; i < PART_LEN1; i++) - { - // Truncate values close to zero and one. - if (hnl[i] > NLP_COMP_HIGH) - { - hnl[i] = ONE_Q14; - } else if (hnl[i] < NLP_COMP_LOW) - { - hnl[i] = 0; - } - - // Remove outliers - if (numPosCoef < 3) - { - nlpGain = 0; - } else - { - nlpGain = ONE_Q14; - } - - // NLP - if ((hnl[i] == ONE_Q14) && (nlpGain == ONE_Q14)) - { - hnl[i] = ONE_Q14; - } else - { - hnl[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(hnl[i], nlpGain, 14); - } - - // multiply with Wiener coefficients - efw[i].real = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real, - hnl[i], 14)); - efw[i].imag = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag, - hnl[i], 14)); - } - } - else - { - // multiply with Wiener coefficients - for (i = 0; i < PART_LEN1; i++) - { - efw[i].real = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real, - hnl[i], 14)); - efw[i].imag = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag, - hnl[i], 14)); - } - } - - if (aecm->cngMode == AecmTrue) - { - ComfortNoise(aecm, ptrDfaClean, efw, hnl); - } - - InverseFFTAndWindow(aecm, fft, efw, output, nearendClean); - - return 0; -} - - -// Generate comfort noise and add to output signal. -// -// \param[in] aecm Handle of the AECM instance. -// \param[in] dfa Absolute value of the nearend signal (Q[aecm->dfaQDomain]). -// \param[in,out] outReal Real part of the output signal (Q[aecm->dfaQDomain]). -// \param[in,out] outImag Imaginary part of the output signal (Q[aecm->dfaQDomain]). -// \param[in] lambda Suppression gain with which to scale the noise level (Q14). 
-// -static void ComfortNoise(AecmCore_t* aecm, - const uint16_t* dfa, - complex16_t* out, - const int16_t* lambda) -{ - int16_t i; - int16_t tmp16; - int32_t tmp32; - - int16_t randW16[PART_LEN]; - int16_t uReal[PART_LEN1]; - int16_t uImag[PART_LEN1]; - int32_t outLShift32; - int16_t noiseRShift16[PART_LEN1]; - - int16_t shiftFromNearToNoise = kNoiseEstQDomain - aecm->dfaCleanQDomain; - int16_t minTrackShift; - - assert(shiftFromNearToNoise >= 0); - assert(shiftFromNearToNoise < 16); - - if (aecm->noiseEstCtr < 100) - { - // Track the minimum more quickly initially. - aecm->noiseEstCtr++; - minTrackShift = 6; - } else - { - minTrackShift = 9; - } - - // Estimate noise power. - for (i = 0; i < PART_LEN1; i++) - { - - // Shift to the noise domain. - tmp32 = (int32_t)dfa[i]; - outLShift32 = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise); - - if (outLShift32 < aecm->noiseEst[i]) - { - // Reset "too low" counter - aecm->noiseEstTooLowCtr[i] = 0; - // Track the minimum. - if (aecm->noiseEst[i] < (1 << minTrackShift)) - { - // For small values, decrease noiseEst[i] every - // |kNoiseEstIncCount| block. The regular approach below can not - // go further down due to truncation. - aecm->noiseEstTooHighCtr[i]++; - if (aecm->noiseEstTooHighCtr[i] >= kNoiseEstIncCount) - { - aecm->noiseEst[i]--; - aecm->noiseEstTooHighCtr[i] = 0; // Reset the counter - } - } - else - { - aecm->noiseEst[i] -= ((aecm->noiseEst[i] - outLShift32) >> minTrackShift); - } - } else - { - // Reset "too high" counter - aecm->noiseEstTooHighCtr[i] = 0; - // Ramp slowly upwards until we hit the minimum again. - if ((aecm->noiseEst[i] >> 19) > 0) - { - // Avoid overflow. - // Multiplication with 2049 will cause wrap around. Scale - // down first and then multiply - aecm->noiseEst[i] >>= 11; - aecm->noiseEst[i] *= 2049; - } - else if ((aecm->noiseEst[i] >> 11) > 0) - { - // Large enough for relative increase - aecm->noiseEst[i] *= 2049; - aecm->noiseEst[i] >>= 11; - } - else - { - // Make incremental increases based on size every - // |kNoiseEstIncCount| block - aecm->noiseEstTooLowCtr[i]++; - if (aecm->noiseEstTooLowCtr[i] >= kNoiseEstIncCount) - { - aecm->noiseEst[i] += (aecm->noiseEst[i] >> 9) + 1; - aecm->noiseEstTooLowCtr[i] = 0; // Reset counter - } - } - } - } - - for (i = 0; i < PART_LEN1; i++) - { - tmp32 = WEBRTC_SPL_RSHIFT_W32(aecm->noiseEst[i], shiftFromNearToNoise); - if (tmp32 > 32767) - { - tmp32 = 32767; - aecm->noiseEst[i] = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise); - } - noiseRShift16[i] = (int16_t)tmp32; - - tmp16 = ONE_Q14 - lambda[i]; - noiseRShift16[i] - = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16, noiseRShift16[i], 14); - } - - // Generate a uniform random array on [0 2^15-1]. - WebRtcSpl_RandUArray(randW16, PART_LEN, &aecm->seed); - - // Generate noise according to estimated energy. - uReal[0] = 0; // Reject LF noise. - uImag[0] = 0; - for (i = 1; i < PART_LEN1; i++) - { - // Get a random index for the cos and sin tables over [0 359]. - tmp16 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(359, randW16[i - 1], 15); - - // Tables are in Q13. 
- uReal[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(noiseRShift16[i], - kCosTable[tmp16], 13); - uImag[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(-noiseRShift16[i], - kSinTable[tmp16], 13); - } - uImag[PART_LEN] = 0; - - for (i = 0; i < PART_LEN1; i++) - { - out[i].real = WEBRTC_SPL_ADD_SAT_W16(out[i].real, uReal[i]); - out[i].imag = WEBRTC_SPL_ADD_SAT_W16(out[i].imag, uImag[i]); - } -} - void WebRtcAecm_BufferFarFrame(AecmCore_t* const aecm, const int16_t* const farend, const int farLen) diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.h index 64251d5221bf..e56ede6901ed 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core.h @@ -272,6 +272,125 @@ void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm, int16_t * const farend, const int farLen, const int knownDelay); + +// All the functions below are intended to be private + +//////////////////////////////////////////////////////////////////////////////// +// WebRtcAecm_UpdateFarHistory() +// +// Moves the pointer to the next entry and inserts |far_spectrum| and +// corresponding Q-domain in its buffer. +// +// Inputs: +// - self : Pointer to the delay estimation instance +// - far_spectrum : Pointer to the far end spectrum +// - far_q : Q-domain of far end spectrum +// +void WebRtcAecm_UpdateFarHistory(AecmCore_t* self, + uint16_t* far_spectrum, + int far_q); + +//////////////////////////////////////////////////////////////////////////////// +// WebRtcAecm_AlignedFarend() +// +// Returns a pointer to the far end spectrum aligned to current near end +// spectrum. The function WebRtc_DelayEstimatorProcessFix(...) should have been +// called before AlignedFarend(...). Otherwise, you get the pointer to the +// previous frame. The memory is only valid until the next call of +// WebRtc_DelayEstimatorProcessFix(...). +// +// Inputs: +// - self : Pointer to the AECM instance. +// - delay : Current delay estimate. +// +// Output: +// - far_q : The Q-domain of the aligned far end spectrum +// +// Return value: +// - far_spectrum : Pointer to the aligned far end spectrum +// NULL - Error +// +const uint16_t* WebRtcAecm_AlignedFarend(AecmCore_t* self, + int* far_q, + int delay); + +/////////////////////////////////////////////////////////////////////////////// +// WebRtcAecm_CalcSuppressionGain() +// +// This function calculates the suppression gain that is used in the +// Wiener filter. +// +// Inputs: +// - aecm : Pointer to the AECM instance. +// +// Return value: +// - supGain : Suppression gain with which to scale the noise +// level (Q14). +// +int16_t WebRtcAecm_CalcSuppressionGain(AecmCore_t * const aecm); + +/////////////////////////////////////////////////////////////////////////////// +// WebRtcAecm_CalcEnergies() +// +// This function calculates the log of energies for nearend, farend and +// estimated echoes. There is also an update of energy decision levels, +// i.e. internal VAD. +// +// Inputs: +// - aecm : Pointer to the AECM instance. +// - far_spectrum : Pointer to farend spectrum. +// - far_q : Q-domain of farend spectrum. +// - nearEner : Near end energy for current block in +// Q(aecm->dfaQDomain). +// +// Output: +// - echoEst : Estimated echo in Q(xfa_q+RESOLUTION_CHANNEL16). 
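+//
+// (Notation: Q(n) above marks a fixed-point quantity carrying n
+// fractional bits, i.e. the integer x stands for the real value x / 2^n.)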
+//
+void WebRtcAecm_CalcEnergies(AecmCore_t * aecm,
+                             const uint16_t* far_spectrum,
+                             const int16_t far_q,
+                             const uint32_t nearEner,
+                             int32_t * echoEst);
+
+///////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_CalcStepSize()
+//
+// This function calculates the step size used in channel estimation
+//
+// Inputs:
+// - aecm          : Pointer to the AECM instance.
+//
+// Return value:
+// - mu            : Stepsize in log2(), i.e. number of shifts.
+//
+int16_t WebRtcAecm_CalcStepSize(AecmCore_t * const aecm);
+
+///////////////////////////////////////////////////////////////////////////////
+// WebRtcAecm_UpdateChannel(...)
+//
+// This function performs channel estimation.
+// NLMS and decision on channel storage.
+//
+// Inputs:
+// - aecm          : Pointer to the AECM instance.
+// - far_spectrum  : Absolute value of the farend signal in Q(far_q)
+// - far_q         : Q-domain of the farend signal
+// - dfa           : Absolute value of the nearend signal
+//                   (Q[aecm->dfaQDomain])
+// - mu            : NLMS step size.
+// Input/Output:
+// - echoEst       : Estimated echo in Q(far_q+RESOLUTION_CHANNEL16).
+//
+void WebRtcAecm_UpdateChannel(AecmCore_t * aecm,
+                              const uint16_t* far_spectrum,
+                              const int16_t far_q,
+                              const uint16_t * const dfa,
+                              const int16_t mu,
+                              int32_t * echoEst);
+
+extern const int16_t WebRtcAecm_kCosTable[];
+extern const int16_t WebRtcAecm_kSinTable[];
+
 ///////////////////////////////////////////////////////////////////////////////
 // Some function pointers, for internal functions shared by ARM NEON and
 // generic C code.
@@ -312,4 +431,20 @@ void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
 void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm);
 #endif
 
+#if defined(MIPS32_LE)
+void WebRtcAecm_CalcLinearEnergies_mips(AecmCore_t* aecm,
+                                        const uint16_t* far_spectrum,
+                                        int32_t* echo_est,
+                                        uint32_t* far_energy,
+                                        uint32_t* echo_energy_adapt,
+                                        uint32_t* echo_energy_stored);
+#if defined(MIPS_DSP_R1_LE)
+void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore_t* aecm,
+                                          const uint16_t* far_spectrum,
+                                          int32_t* echo_est);
+
+void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore_t* aecm);
+#endif
+#endif
+
 #endif
diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core_c.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core_c.c
new file mode 100644
index 000000000000..63d4ac90280f
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core_c.c
@@ -0,0 +1,792 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/aecm/aecm_core.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "webrtc/common_audio/signal_processing/include/real_fft.h"
+#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
+#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
+#include "webrtc/modules/audio_processing/utility/ring_buffer.h"
+#include "webrtc/system_wrappers/interface/compile_assert_c.h"
+#include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
+#include "webrtc/typedefs.h"
+
+// Square root of Hanning window in Q14.
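+// (Q14 fixed point: the integer 16384 represents 1.0, so the 65 entries
+// below rise from 0 to 16384 -- 0.0 to 1.0 -- forming the rising half of
+// a 128-point window. Multiplying a sample by a Q14 coefficient and
+// shifting right by 14, as WEBRTC_SPL_MUL_16_16_RSFT(..., 14) does in
+// WindowAndFFT(), keeps the sample in its original Q-domain.)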
+#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON) +// Table is defined in an ARM assembly file. +extern const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END; +#else +static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = { + 0, 399, 798, 1196, 1594, 1990, 2386, 2780, 3172, + 3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224, + 6591, 6954, 7313, 7668, 8019, 8364, 8705, 9040, + 9370, 9695, 10013, 10326, 10633, 10933, 11227, 11514, + 11795, 12068, 12335, 12594, 12845, 13089, 13325, 13553, + 13773, 13985, 14189, 14384, 14571, 14749, 14918, 15079, + 15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034, + 16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384 +}; +#endif + +#ifdef AECM_WITH_ABS_APPROX +//Q15 alpha = 0.99439986968132 const Factor for magnitude approximation +static const uint16_t kAlpha1 = 32584; +//Q15 beta = 0.12967166976970 const Factor for magnitude approximation +static const uint16_t kBeta1 = 4249; +//Q15 alpha = 0.94234827210087 const Factor for magnitude approximation +static const uint16_t kAlpha2 = 30879; +//Q15 beta = 0.33787806009150 const Factor for magnitude approximation +static const uint16_t kBeta2 = 11072; +//Q15 alpha = 0.82247698684306 const Factor for magnitude approximation +static const uint16_t kAlpha3 = 26951; +//Q15 beta = 0.57762063060713 const Factor for magnitude approximation +static const uint16_t kBeta3 = 18927; +#endif + +static const int16_t kNoiseEstQDomain = 15; +static const int16_t kNoiseEstIncCount = 5; + +static void ComfortNoise(AecmCore_t* aecm, + const uint16_t* dfa, + complex16_t* out, + const int16_t* lambda); + +static void WindowAndFFT(AecmCore_t* aecm, + int16_t* fft, + const int16_t* time_signal, + complex16_t* freq_signal, + int time_signal_scaling) { + int i = 0; + + // FFT of signal + for (i = 0; i < PART_LEN; i++) { + // Window time domain signal and insert into real part of + // transformation array |fft| + fft[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( + (time_signal[i] << time_signal_scaling), + WebRtcAecm_kSqrtHanning[i], + 14); + fft[PART_LEN + i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( + (time_signal[i + PART_LEN] << time_signal_scaling), + WebRtcAecm_kSqrtHanning[PART_LEN - i], + 14); + } + + // Do forward FFT, then take only the first PART_LEN complex samples, + // and change signs of the imaginary parts. + WebRtcSpl_RealForwardFFT(aecm->real_fft, fft, (int16_t*)freq_signal); + for (i = 0; i < PART_LEN; i++) { + freq_signal[i].imag = -freq_signal[i].imag; + } +} + +static void InverseFFTAndWindow(AecmCore_t* aecm, + int16_t* fft, + complex16_t* efw, + int16_t* output, + const int16_t* nearendClean) +{ + int i, j, outCFFT; + int32_t tmp32no1; + // Reuse |efw| for the inverse FFT output after transferring + // the contents to |fft|. + int16_t* ifft_out = (int16_t*)efw; + + // Synthesis + for (i = 1, j = 2; i < PART_LEN; i += 1, j += 2) { + fft[j] = efw[i].real; + fft[j + 1] = -efw[i].imag; + } + fft[0] = efw[0].real; + fft[1] = -efw[0].imag; + + fft[PART_LEN2] = efw[PART_LEN].real; + fft[PART_LEN2 + 1] = -efw[PART_LEN].imag; + + // Inverse FFT. Keep outCFFT to scale the samples in the next block. 
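+  // (The return value is the downscaling the fixed-point IFFT applied
+  // internally to avoid overflow; the WEBRTC_SPL_SHIFT_W32() calls below
+  // shift by outCFFT - dfaCleanQDomain to compensate and land the output
+  // in the clean near-end Q-domain.)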
+ outCFFT = WebRtcSpl_RealInverseFFT(aecm->real_fft, fft, ifft_out); + for (i = 0; i < PART_LEN; i++) { + ifft_out[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( + ifft_out[i], WebRtcAecm_kSqrtHanning[i], 14); + tmp32no1 = WEBRTC_SPL_SHIFT_W32((int32_t)ifft_out[i], + outCFFT - aecm->dfaCleanQDomain); + output[i] = (int16_t)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, + tmp32no1 + aecm->outBuf[i], + WEBRTC_SPL_WORD16_MIN); + + tmp32no1 = WEBRTC_SPL_MUL_16_16_RSFT(ifft_out[PART_LEN + i], + WebRtcAecm_kSqrtHanning[PART_LEN - i], + 14); + tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, + outCFFT - aecm->dfaCleanQDomain); + aecm->outBuf[i] = (int16_t)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX, + tmp32no1, + WEBRTC_SPL_WORD16_MIN); + } + + // Copy the current block to the old position + // (aecm->outBuf is shifted elsewhere) + memcpy(aecm->xBuf, aecm->xBuf + PART_LEN, sizeof(int16_t) * PART_LEN); + memcpy(aecm->dBufNoisy, + aecm->dBufNoisy + PART_LEN, + sizeof(int16_t) * PART_LEN); + if (nearendClean != NULL) + { + memcpy(aecm->dBufClean, + aecm->dBufClean + PART_LEN, + sizeof(int16_t) * PART_LEN); + } +} + +// Transforms a time domain signal into the frequency domain, outputting the +// complex valued signal, absolute value and sum of absolute values. +// +// time_signal [in] Pointer to time domain signal +// freq_signal_real [out] Pointer to real part of frequency domain array +// freq_signal_imag [out] Pointer to imaginary part of frequency domain +// array +// freq_signal_abs [out] Pointer to absolute value of frequency domain +// array +// freq_signal_sum_abs [out] Pointer to the sum of all absolute values in +// the frequency domain array +// return value The Q-domain of current frequency values +// +static int TimeToFrequencyDomain(AecmCore_t* aecm, + const int16_t* time_signal, + complex16_t* freq_signal, + uint16_t* freq_signal_abs, + uint32_t* freq_signal_sum_abs) +{ + int i = 0; + int time_signal_scaling = 0; + + int32_t tmp32no1 = 0; + int32_t tmp32no2 = 0; + + // In fft_buf, +16 for 32-byte alignment. 
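+  // (16 extra int16_t elements are 32 spare bytes; adding 31 and masking
+  // with ~31 then rounds the buffer address up to the next 32-byte
+  // boundary by clearing the low five address bits.)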
+ int16_t fft_buf[PART_LEN4 + 16]; + int16_t *fft = (int16_t *) (((uintptr_t) fft_buf + 31) & ~31); + + int16_t tmp16no1; +#ifndef WEBRTC_ARCH_ARM_V7 + int16_t tmp16no2; +#endif +#ifdef AECM_WITH_ABS_APPROX + int16_t max_value = 0; + int16_t min_value = 0; + uint16_t alpha = 0; + uint16_t beta = 0; +#endif + +#ifdef AECM_DYNAMIC_Q + tmp16no1 = WebRtcSpl_MaxAbsValueW16(time_signal, PART_LEN2); + time_signal_scaling = WebRtcSpl_NormW16(tmp16no1); +#endif + + WindowAndFFT(aecm, fft, time_signal, freq_signal, time_signal_scaling); + + // Extract imaginary and real part, calculate the magnitude for + // all frequency bins + freq_signal[0].imag = 0; + freq_signal[PART_LEN].imag = 0; + freq_signal_abs[0] = (uint16_t)WEBRTC_SPL_ABS_W16(freq_signal[0].real); + freq_signal_abs[PART_LEN] = (uint16_t)WEBRTC_SPL_ABS_W16( + freq_signal[PART_LEN].real); + (*freq_signal_sum_abs) = (uint32_t)(freq_signal_abs[0]) + + (uint32_t)(freq_signal_abs[PART_LEN]); + + for (i = 1; i < PART_LEN; i++) + { + if (freq_signal[i].real == 0) + { + freq_signal_abs[i] = (uint16_t)WEBRTC_SPL_ABS_W16(freq_signal[i].imag); + } + else if (freq_signal[i].imag == 0) + { + freq_signal_abs[i] = (uint16_t)WEBRTC_SPL_ABS_W16(freq_signal[i].real); + } + else + { + // Approximation for magnitude of complex fft output + // magn = sqrt(real^2 + imag^2) + // magn ~= alpha * max(|imag|,|real|) + beta * min(|imag|,|real|) + // + // The parameters alpha and beta are stored in Q15 + +#ifdef AECM_WITH_ABS_APPROX + tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real); + tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag); + + if(tmp16no1 > tmp16no2) + { + max_value = tmp16no1; + min_value = tmp16no2; + } else + { + max_value = tmp16no2; + min_value = tmp16no1; + } + + // Magnitude in Q(-6) + if ((max_value >> 2) > min_value) + { + alpha = kAlpha1; + beta = kBeta1; + } else if ((max_value >> 1) > min_value) + { + alpha = kAlpha2; + beta = kBeta2; + } else + { + alpha = kAlpha3; + beta = kBeta3; + } + tmp16no1 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(max_value, alpha, 15); + tmp16no2 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(min_value, beta, 15); + freq_signal_abs[i] = (uint16_t)tmp16no1 + (uint16_t)tmp16no2; +#else +#ifdef WEBRTC_ARCH_ARM_V7 + __asm __volatile( + "smulbb %[tmp32no1], %[real], %[real]\n\t" + "smlabb %[tmp32no2], %[imag], %[imag], %[tmp32no1]\n\t" + :[tmp32no1]"+r"(tmp32no1), + [tmp32no2]"=r"(tmp32no2) + :[real]"r"(freq_signal[i].real), + [imag]"r"(freq_signal[i].imag) + ); +#else + tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real); + tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag); + tmp32no1 = WEBRTC_SPL_MUL_16_16(tmp16no1, tmp16no1); + tmp32no2 = WEBRTC_SPL_MUL_16_16(tmp16no2, tmp16no2); + tmp32no2 = WEBRTC_SPL_ADD_SAT_W32(tmp32no1, tmp32no2); +#endif // WEBRTC_ARCH_ARM_V7 + tmp32no1 = WebRtcSpl_SqrtFloor(tmp32no2); + + freq_signal_abs[i] = (uint16_t)tmp32no1; +#endif // AECM_WITH_ABS_APPROX + } + (*freq_signal_sum_abs) += (uint32_t)freq_signal_abs[i]; + } + + return time_signal_scaling; +} + +int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, + const int16_t * farend, + const int16_t * nearendNoisy, + const int16_t * nearendClean, + int16_t * output) +{ + int i; + + uint32_t xfaSum; + uint32_t dfaNoisySum; + uint32_t dfaCleanSum; + uint32_t echoEst32Gained; + uint32_t tmpU32; + + int32_t tmp32no1; + + uint16_t xfa[PART_LEN1]; + uint16_t dfaNoisy[PART_LEN1]; + uint16_t dfaClean[PART_LEN1]; + uint16_t* ptrDfaClean = dfaClean; + const uint16_t* far_spectrum_ptr = NULL; + + // 32 byte aligned buffers (with +8 or +16). 
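+  // (Likewise here: +8 int32_t elements or +16 int16_t elements both
+  // leave the 32 spare bytes needed for the round-up-and-mask alignment
+  // applied just below.)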
+ // TODO (kma): define fft with complex16_t. + int16_t fft_buf[PART_LEN4 + 2 + 16]; // +2 to make a loop safe. + int32_t echoEst32_buf[PART_LEN1 + 8]; + int32_t dfw_buf[PART_LEN2 + 8]; + int32_t efw_buf[PART_LEN2 + 8]; + + int16_t* fft = (int16_t*) (((uintptr_t) fft_buf + 31) & ~ 31); + int32_t* echoEst32 = (int32_t*) (((uintptr_t) echoEst32_buf + 31) & ~ 31); + complex16_t* dfw = (complex16_t*) (((uintptr_t) dfw_buf + 31) & ~ 31); + complex16_t* efw = (complex16_t*) (((uintptr_t) efw_buf + 31) & ~ 31); + + int16_t hnl[PART_LEN1]; + int16_t numPosCoef = 0; + int16_t nlpGain = ONE_Q14; + int delay; + int16_t tmp16no1; + int16_t tmp16no2; + int16_t mu; + int16_t supGain; + int16_t zeros32, zeros16; + int16_t zerosDBufNoisy, zerosDBufClean, zerosXBuf; + int far_q; + int16_t resolutionDiff, qDomainDiff; + + const int kMinPrefBand = 4; + const int kMaxPrefBand = 24; + int32_t avgHnl32 = 0; + + // Determine startup state. There are three states: + // (0) the first CONV_LEN blocks + // (1) another CONV_LEN blocks + // (2) the rest + + if (aecm->startupState < 2) + { + aecm->startupState = (aecm->totCount >= CONV_LEN) + + (aecm->totCount >= CONV_LEN2); + } + // END: Determine startup state + + // Buffer near and far end signals + memcpy(aecm->xBuf + PART_LEN, farend, sizeof(int16_t) * PART_LEN); + memcpy(aecm->dBufNoisy + PART_LEN, nearendNoisy, sizeof(int16_t) * PART_LEN); + if (nearendClean != NULL) + { + memcpy(aecm->dBufClean + PART_LEN, + nearendClean, + sizeof(int16_t) * PART_LEN); + } + + // Transform far end signal from time domain to frequency domain. + far_q = TimeToFrequencyDomain(aecm, + aecm->xBuf, + dfw, + xfa, + &xfaSum); + + // Transform noisy near end signal from time domain to frequency domain. + zerosDBufNoisy = TimeToFrequencyDomain(aecm, + aecm->dBufNoisy, + dfw, + dfaNoisy, + &dfaNoisySum); + aecm->dfaNoisyQDomainOld = aecm->dfaNoisyQDomain; + aecm->dfaNoisyQDomain = (int16_t)zerosDBufNoisy; + + + if (nearendClean == NULL) + { + ptrDfaClean = dfaNoisy; + aecm->dfaCleanQDomainOld = aecm->dfaNoisyQDomainOld; + aecm->dfaCleanQDomain = aecm->dfaNoisyQDomain; + dfaCleanSum = dfaNoisySum; + } else + { + // Transform clean near end signal from time domain to frequency domain. + zerosDBufClean = TimeToFrequencyDomain(aecm, + aecm->dBufClean, + dfw, + dfaClean, + &dfaCleanSum); + aecm->dfaCleanQDomainOld = aecm->dfaCleanQDomain; + aecm->dfaCleanQDomain = (int16_t)zerosDBufClean; + } + + // Get the delay + // Save far-end history and estimate delay + WebRtcAecm_UpdateFarHistory(aecm, xfa, far_q); + if (WebRtc_AddFarSpectrumFix(aecm->delay_estimator_farend, + xfa, + PART_LEN1, + far_q) == -1) { + return -1; + } + delay = WebRtc_DelayEstimatorProcessFix(aecm->delay_estimator, + dfaNoisy, + PART_LEN1, + zerosDBufNoisy); + if (delay == -1) + { + return -1; + } + else if (delay == -2) + { + // If the delay is unknown, we assume zero. + // NOTE: this will have to be adjusted if we ever add lookahead. 
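+    // (Of the estimator's two failure codes handled above, -1 is a hard
+    // error while -2 only means "no reliable estimate yet", so zero is a
+    // safe stand-in until the estimator converges.)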
+ delay = 0; + } + + if (aecm->fixedDelay >= 0) + { + // Use fixed delay + delay = aecm->fixedDelay; + } + + // Get aligned far end spectrum + far_spectrum_ptr = WebRtcAecm_AlignedFarend(aecm, &far_q, delay); + zerosXBuf = (int16_t) far_q; + if (far_spectrum_ptr == NULL) + { + return -1; + } + + // Calculate log(energy) and update energy threshold levels + WebRtcAecm_CalcEnergies(aecm, + far_spectrum_ptr, + zerosXBuf, + dfaNoisySum, + echoEst32); + + // Calculate stepsize + mu = WebRtcAecm_CalcStepSize(aecm); + + // Update counters + aecm->totCount++; + + // This is the channel estimation algorithm. + // It is base on NLMS but has a variable step length, + // which was calculated above. + WebRtcAecm_UpdateChannel(aecm, + far_spectrum_ptr, + zerosXBuf, + dfaNoisy, + mu, + echoEst32); + supGain = WebRtcAecm_CalcSuppressionGain(aecm); + + + // Calculate Wiener filter hnl[] + for (i = 0; i < PART_LEN1; i++) + { + // Far end signal through channel estimate in Q8 + // How much can we shift right to preserve resolution + tmp32no1 = echoEst32[i] - aecm->echoFilt[i]; + aecm->echoFilt[i] += WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32no1, + 50), 8); + + zeros32 = WebRtcSpl_NormW32(aecm->echoFilt[i]) + 1; + zeros16 = WebRtcSpl_NormW16(supGain) + 1; + if (zeros32 + zeros16 > 16) + { + // Multiplication is safe + // Result in + // Q(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN+ + // aecm->xfaQDomainBuf[diff]) + echoEst32Gained = WEBRTC_SPL_UMUL_32_16((uint32_t)aecm->echoFilt[i], + (uint16_t)supGain); + resolutionDiff = 14 - RESOLUTION_CHANNEL16 - RESOLUTION_SUPGAIN; + resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf); + } else + { + tmp16no1 = 17 - zeros32 - zeros16; + resolutionDiff = 14 + tmp16no1 - RESOLUTION_CHANNEL16 - + RESOLUTION_SUPGAIN; + resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf); + if (zeros32 > tmp16no1) + { + echoEst32Gained = WEBRTC_SPL_UMUL_32_16((uint32_t)aecm->echoFilt[i], + (uint16_t)WEBRTC_SPL_RSHIFT_W16( + supGain, + tmp16no1) + ); + } else + { + // Result in Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN-16) + echoEst32Gained = WEBRTC_SPL_UMUL_32_16((uint32_t)WEBRTC_SPL_RSHIFT_W32( + aecm->echoFilt[i], + tmp16no1), + (uint16_t)supGain); + } + } + + zeros16 = WebRtcSpl_NormW16(aecm->nearFilt[i]); + if ((zeros16 < (aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld)) + & (aecm->nearFilt[i])) + { + tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], zeros16); + qDomainDiff = zeros16 - aecm->dfaCleanQDomain + aecm->dfaCleanQDomainOld; + } else + { + tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], + aecm->dfaCleanQDomain - + aecm->dfaCleanQDomainOld); + qDomainDiff = 0; + } + tmp16no2 = WEBRTC_SPL_SHIFT_W16(ptrDfaClean[i], qDomainDiff); + tmp32no1 = (int32_t)(tmp16no2 - tmp16no1); + tmp16no2 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 4); + tmp16no2 += tmp16no1; + zeros16 = WebRtcSpl_NormW16(tmp16no2); + if ((tmp16no2) & (-qDomainDiff > zeros16)) + { + aecm->nearFilt[i] = WEBRTC_SPL_WORD16_MAX; + } else + { + aecm->nearFilt[i] = WEBRTC_SPL_SHIFT_W16(tmp16no2, -qDomainDiff); + } + + // Wiener filter coefficients, resulting hnl in Q14 + if (echoEst32Gained == 0) + { + hnl[i] = ONE_Q14; + } else if (aecm->nearFilt[i] == 0) + { + hnl[i] = 0; + } else + { + // Multiply the suppression gain + // Rounding + echoEst32Gained += (uint32_t)(aecm->nearFilt[i] >> 1); + tmpU32 = WebRtcSpl_DivU32U16(echoEst32Gained, + (uint16_t)aecm->nearFilt[i]); + + // Current resolution is + // Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN- max(0,17-zeros16- zeros32)) + // Make sure we are in Q14 + tmp32no1 = 
(int32_t)WEBRTC_SPL_SHIFT_W32(tmpU32, resolutionDiff); + if (tmp32no1 > ONE_Q14) + { + hnl[i] = 0; + } else if (tmp32no1 < 0) + { + hnl[i] = ONE_Q14; + } else + { + // 1-echoEst/dfa + hnl[i] = ONE_Q14 - (int16_t)tmp32no1; + if (hnl[i] < 0) + { + hnl[i] = 0; + } + } + } + if (hnl[i]) + { + numPosCoef++; + } + } + // Only in wideband. Prevent the gain in upper band from being larger than + // in lower band. + if (aecm->mult == 2) + { + // TODO(bjornv): Investigate if the scaling of hnl[i] below can cause + // speech distortion in double-talk. + for (i = 0; i < PART_LEN1; i++) + { + hnl[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(hnl[i], hnl[i], 14); + } + + for (i = kMinPrefBand; i <= kMaxPrefBand; i++) + { + avgHnl32 += (int32_t)hnl[i]; + } + assert(kMaxPrefBand - kMinPrefBand + 1 > 0); + avgHnl32 /= (kMaxPrefBand - kMinPrefBand + 1); + + for (i = kMaxPrefBand; i < PART_LEN1; i++) + { + if (hnl[i] > (int16_t)avgHnl32) + { + hnl[i] = (int16_t)avgHnl32; + } + } + } + + // Calculate NLP gain, result is in Q14 + if (aecm->nlpFlag) + { + for (i = 0; i < PART_LEN1; i++) + { + // Truncate values close to zero and one. + if (hnl[i] > NLP_COMP_HIGH) + { + hnl[i] = ONE_Q14; + } else if (hnl[i] < NLP_COMP_LOW) + { + hnl[i] = 0; + } + + // Remove outliers + if (numPosCoef < 3) + { + nlpGain = 0; + } else + { + nlpGain = ONE_Q14; + } + + // NLP + if ((hnl[i] == ONE_Q14) && (nlpGain == ONE_Q14)) + { + hnl[i] = ONE_Q14; + } else + { + hnl[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(hnl[i], nlpGain, 14); + } + + // multiply with Wiener coefficients + efw[i].real = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real, + hnl[i], 14)); + efw[i].imag = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag, + hnl[i], 14)); + } + } + else + { + // multiply with Wiener coefficients + for (i = 0; i < PART_LEN1; i++) + { + efw[i].real = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real, + hnl[i], 14)); + efw[i].imag = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag, + hnl[i], 14)); + } + } + + if (aecm->cngMode == AecmTrue) + { + ComfortNoise(aecm, ptrDfaClean, efw, hnl); + } + + InverseFFTAndWindow(aecm, fft, efw, output, nearendClean); + + return 0; +} + + +static void ComfortNoise(AecmCore_t* aecm, + const uint16_t* dfa, + complex16_t* out, + const int16_t* lambda) +{ + int16_t i; + int16_t tmp16; + int32_t tmp32; + + int16_t randW16[PART_LEN]; + int16_t uReal[PART_LEN1]; + int16_t uImag[PART_LEN1]; + int32_t outLShift32; + int16_t noiseRShift16[PART_LEN1]; + + int16_t shiftFromNearToNoise = kNoiseEstQDomain - aecm->dfaCleanQDomain; + int16_t minTrackShift; + + assert(shiftFromNearToNoise >= 0); + assert(shiftFromNearToNoise < 16); + + if (aecm->noiseEstCtr < 100) + { + // Track the minimum more quickly initially. + aecm->noiseEstCtr++; + minTrackShift = 6; + } else + { + minTrackShift = 9; + } + + // Estimate noise power. + for (i = 0; i < PART_LEN1; i++) + { + // Shift to the noise domain. + tmp32 = (int32_t)dfa[i]; + outLShift32 = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise); + + if (outLShift32 < aecm->noiseEst[i]) + { + // Reset "too low" counter + aecm->noiseEstTooLowCtr[i] = 0; + // Track the minimum. + if (aecm->noiseEst[i] < (1 << minTrackShift)) + { + // For small values, decrease noiseEst[i] every + // |kNoiseEstIncCount| block. The regular approach below can not + // go further down due to truncation. 
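+        // (This slow path steps noiseEst[i] down one unit per
+        // kNoiseEstIncCount blocks; its counterpart in the ramp-up branch
+        // below multiplies by 2049/2048 -- roughly +0.05% per block --
+        // once the estimate is large enough to scale multiplicatively.)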
+ aecm->noiseEstTooHighCtr[i]++; + if (aecm->noiseEstTooHighCtr[i] >= kNoiseEstIncCount) + { + aecm->noiseEst[i]--; + aecm->noiseEstTooHighCtr[i] = 0; // Reset the counter + } + } + else + { + aecm->noiseEst[i] -= ((aecm->noiseEst[i] - outLShift32) + >> minTrackShift); + } + } else + { + // Reset "too high" counter + aecm->noiseEstTooHighCtr[i] = 0; + // Ramp slowly upwards until we hit the minimum again. + if ((aecm->noiseEst[i] >> 19) > 0) + { + // Avoid overflow. + // Multiplication with 2049 will cause wrap around. Scale + // down first and then multiply + aecm->noiseEst[i] >>= 11; + aecm->noiseEst[i] *= 2049; + } + else if ((aecm->noiseEst[i] >> 11) > 0) + { + // Large enough for relative increase + aecm->noiseEst[i] *= 2049; + aecm->noiseEst[i] >>= 11; + } + else + { + // Make incremental increases based on size every + // |kNoiseEstIncCount| block + aecm->noiseEstTooLowCtr[i]++; + if (aecm->noiseEstTooLowCtr[i] >= kNoiseEstIncCount) + { + aecm->noiseEst[i] += (aecm->noiseEst[i] >> 9) + 1; + aecm->noiseEstTooLowCtr[i] = 0; // Reset counter + } + } + } + } + + for (i = 0; i < PART_LEN1; i++) + { + tmp32 = WEBRTC_SPL_RSHIFT_W32(aecm->noiseEst[i], shiftFromNearToNoise); + if (tmp32 > 32767) + { + tmp32 = 32767; + aecm->noiseEst[i] = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise); + } + noiseRShift16[i] = (int16_t)tmp32; + + tmp16 = ONE_Q14 - lambda[i]; + noiseRShift16[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16, + noiseRShift16[i], + 14); + } + + // Generate a uniform random array on [0 2^15-1]. + WebRtcSpl_RandUArray(randW16, PART_LEN, &aecm->seed); + + // Generate noise according to estimated energy. + uReal[0] = 0; // Reject LF noise. + uImag[0] = 0; + for (i = 1; i < PART_LEN1; i++) + { + // Get a random index for the cos and sin tables over [0 359]. + tmp16 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(359, randW16[i - 1], 15); + + // Tables are in Q13. + uReal[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(noiseRShift16[i], + WebRtcAecm_kCosTable[tmp16], + 13); + uImag[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(-noiseRShift16[i], + WebRtcAecm_kSinTable[tmp16], + 13); + } + uImag[PART_LEN] = 0; + + for (i = 0; i < PART_LEN1; i++) + { + out[i].real = WEBRTC_SPL_ADD_SAT_W16(out[i].real, uReal[i]); + out[i].imag = WEBRTC_SPL_ADD_SAT_W16(out[i].imag, uImag[i]); + } +} + diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core_mips.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core_mips.c new file mode 100644 index 000000000000..6a231b384b63 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aecm/aecm_core_mips.c @@ -0,0 +1,1571 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/audio_processing/aecm/aecm_core.h" + +#include + +#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h" +#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h" + +static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = { + 0, 399, 798, 1196, 1594, 1990, 2386, 2780, 3172, + 3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224, + 6591, 6954, 7313, 7668, 8019, 8364, 8705, 9040, + 9370, 9695, 10013, 10326, 10633, 10933, 11227, 11514, + 11795, 12068, 12335, 12594, 12845, 13089, 13325, 13553, + 13773, 13985, 14189, 14384, 14571, 14749, 14918, 15079, + 15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034, + 16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384 +}; + +static const int16_t kNoiseEstQDomain = 15; +static const int16_t kNoiseEstIncCount = 5; + +static int16_t coefTable[] = { + 0, 4, 256, 260, 128, 132, 384, 388, + 64, 68, 320, 324, 192, 196, 448, 452, + 32, 36, 288, 292, 160, 164, 416, 420, + 96, 100, 352, 356, 224, 228, 480, 484, + 16, 20, 272, 276, 144, 148, 400, 404, + 80, 84, 336, 340, 208, 212, 464, 468, + 48, 52, 304, 308, 176, 180, 432, 436, + 112, 116, 368, 372, 240, 244, 496, 500, + 8, 12, 264, 268, 136, 140, 392, 396, + 72, 76, 328, 332, 200, 204, 456, 460, + 40, 44, 296, 300, 168, 172, 424, 428, + 104, 108, 360, 364, 232, 236, 488, 492, + 24, 28, 280, 284, 152, 156, 408, 412, + 88, 92, 344, 348, 216, 220, 472, 476, + 56, 60, 312, 316, 184, 188, 440, 444, + 120, 124, 376, 380, 248, 252, 504, 508 +}; + +static int16_t coefTable_ifft[] = { + 0, 512, 256, 508, 128, 252, 384, 380, + 64, 124, 320, 444, 192, 188, 448, 316, + 32, 60, 288, 476, 160, 220, 416, 348, + 96, 92, 352, 412, 224, 156, 480, 284, + 16, 28, 272, 492, 144, 236, 400, 364, + 80, 108, 336, 428, 208, 172, 464, 300, + 48, 44, 304, 460, 176, 204, 432, 332, + 112, 76, 368, 396, 240, 140, 496, 268, + 8, 12, 264, 500, 136, 244, 392, 372, + 72, 116, 328, 436, 200, 180, 456, 308, + 40, 52, 296, 468, 168, 212, 424, 340, + 104, 84, 360, 404, 232, 148, 488, 276, + 24, 20, 280, 484, 152, 228, 408, 356, + 88, 100, 344, 420, 216, 164, 472, 292, + 56, 36, 312, 452, 184, 196, 440, 324, + 120, 68, 376, 388, 248, 132, 504, 260 +}; + +static void ComfortNoise(AecmCore_t* aecm, + const uint16_t* dfa, + complex16_t* out, + const int16_t* lambda); + +static void WindowAndFFT(AecmCore_t* aecm, + int16_t* fft, + const int16_t* time_signal, + complex16_t* freq_signal, + int time_signal_scaling) { + int i, j; + int32_t tmp1, tmp2, tmp3, tmp4; + int16_t* pfrfi; + complex16_t* pfreq_signal; + int16_t f_coef, s_coef; + int32_t load_ptr, store_ptr1, store_ptr2, shift, shift1; + int32_t hann, hann1, coefs; + + memset(fft, 0, sizeof(int16_t) * PART_LEN4); + + // FFT of signal + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[shift], %[time_signal_scaling], -14 \n\t" + "addiu %[i], $zero, 64 \n\t" + "addiu %[load_ptr], %[time_signal], 0 \n\t" + "addiu %[hann], %[hanning], 0 \n\t" + "addiu %[hann1], %[hanning], 128 \n\t" + "addiu %[coefs], %[coefTable], 0 \n\t" + "bltz %[shift], 2f \n\t" + " negu %[shift1], %[shift] \n\t" + "1: \n\t" + "lh %[tmp1], 0(%[load_ptr]) \n\t" + "lh %[tmp2], 0(%[hann]) \n\t" + "lh %[tmp3], 128(%[load_ptr]) \n\t" + "lh %[tmp4], 0(%[hann1]) \n\t" + "addiu %[i], %[i], -1 \n\t" + "mul %[tmp1], %[tmp1], %[tmp2] \n\t" + "mul %[tmp3], %[tmp3], %[tmp4] \n\t" + "lh %[f_coef], 0(%[coefs]) \n\t" + "lh %[s_coef], 2(%[coefs]) \n\t" + "addiu %[load_ptr], %[load_ptr], 2 \n\t" + "addiu %[hann], %[hann], 2 \n\t" + "addiu 
%[hann1], %[hann1], -2 \n\t" + "addu %[store_ptr1], %[fft], %[f_coef] \n\t" + "addu %[store_ptr2], %[fft], %[s_coef] \n\t" + "sllv %[tmp1], %[tmp1], %[shift] \n\t" + "sllv %[tmp3], %[tmp3], %[shift] \n\t" + "sh %[tmp1], 0(%[store_ptr1]) \n\t" + "sh %[tmp3], 0(%[store_ptr2]) \n\t" + "bgtz %[i], 1b \n\t" + " addiu %[coefs], %[coefs], 4 \n\t" + "b 3f \n\t" + " nop \n\t" + "2: \n\t" + "lh %[tmp1], 0(%[load_ptr]) \n\t" + "lh %[tmp2], 0(%[hann]) \n\t" + "lh %[tmp3], 128(%[load_ptr]) \n\t" + "lh %[tmp4], 0(%[hann1]) \n\t" + "addiu %[i], %[i], -1 \n\t" + "mul %[tmp1], %[tmp1], %[tmp2] \n\t" + "mul %[tmp3], %[tmp3], %[tmp4] \n\t" + "lh %[f_coef], 0(%[coefs]) \n\t" + "lh %[s_coef], 2(%[coefs]) \n\t" + "addiu %[load_ptr], %[load_ptr], 2 \n\t" + "addiu %[hann], %[hann], 2 \n\t" + "addiu %[hann1], %[hann1], -2 \n\t" + "addu %[store_ptr1], %[fft], %[f_coef] \n\t" + "addu %[store_ptr2], %[fft], %[s_coef] \n\t" + "srav %[tmp1], %[tmp1], %[shift1] \n\t" + "srav %[tmp3], %[tmp3], %[shift1] \n\t" + "sh %[tmp1], 0(%[store_ptr1]) \n\t" + "sh %[tmp3], 0(%[store_ptr2]) \n\t" + "bgtz %[i], 2b \n\t" + " addiu %[coefs], %[coefs], 4 \n\t" + "3: \n\t" + ".set pop \n\t" + : [load_ptr] "=&r" (load_ptr), [shift] "=&r" (shift), [hann] "=&r" (hann), + [hann1] "=&r" (hann1), [shift1] "=&r" (shift1), [coefs] "=&r" (coefs), + [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), + [tmp4] "=&r" (tmp4), [i] "=&r" (i), [f_coef] "=&r" (f_coef), + [s_coef] "=&r" (s_coef), [store_ptr1] "=&r" (store_ptr1), + [store_ptr2] "=&r" (store_ptr2) + : [time_signal] "r" (time_signal), [coefTable] "r" (coefTable), + [time_signal_scaling] "r" (time_signal_scaling), + [hanning] "r" (WebRtcAecm_kSqrtHanning), [fft] "r" (fft) + : "memory", "hi", "lo" + ); + + WebRtcSpl_ComplexFFT(fft, PART_LEN_SHIFT, 1); + pfrfi = fft; + pfreq_signal = freq_signal; + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[j], $zero, 128 \n\t" + "1: \n\t" + "lh %[tmp1], 0(%[pfrfi]) \n\t" + "lh %[tmp2], 2(%[pfrfi]) \n\t" + "lh %[tmp3], 4(%[pfrfi]) \n\t" + "lh %[tmp4], 6(%[pfrfi]) \n\t" + "subu %[tmp2], $zero, %[tmp2] \n\t" + "sh %[tmp1], 0(%[pfreq_signal]) \n\t" + "sh %[tmp2], 2(%[pfreq_signal]) \n\t" + "subu %[tmp4], $zero, %[tmp4] \n\t" + "sh %[tmp3], 4(%[pfreq_signal]) \n\t" + "sh %[tmp4], 6(%[pfreq_signal]) \n\t" + "lh %[tmp1], 8(%[pfrfi]) \n\t" + "lh %[tmp2], 10(%[pfrfi]) \n\t" + "lh %[tmp3], 12(%[pfrfi]) \n\t" + "lh %[tmp4], 14(%[pfrfi]) \n\t" + "addiu %[j], %[j], -8 \n\t" + "subu %[tmp2], $zero, %[tmp2] \n\t" + "sh %[tmp1], 8(%[pfreq_signal]) \n\t" + "sh %[tmp2], 10(%[pfreq_signal]) \n\t" + "subu %[tmp4], $zero, %[tmp4] \n\t" + "sh %[tmp3], 12(%[pfreq_signal]) \n\t" + "sh %[tmp4], 14(%[pfreq_signal]) \n\t" + "addiu %[pfreq_signal], %[pfreq_signal], 16 \n\t" + "bgtz %[j], 1b \n\t" + " addiu %[pfrfi], %[pfrfi], 16 \n\t" + ".set pop \n\t" + : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), + [j] "=&r" (j), [pfrfi] "+r" (pfrfi), [pfreq_signal] "+r" (pfreq_signal), + [tmp4] "=&r" (tmp4) + : + : "memory" + ); +} + +static void InverseFFTAndWindow(AecmCore_t* aecm, + int16_t* fft, + complex16_t* efw, + int16_t* output, + const int16_t* nearendClean) { + int i, outCFFT; + int32_t tmp1, tmp2, tmp3, tmp4, tmp_re, tmp_im; + int16_t* pcoefTable_ifft = coefTable_ifft; + int16_t* pfft = fft; + int16_t* ppfft = fft; + complex16_t* pefw = efw; + int32_t out_aecm; + int16_t* paecm_buf = aecm->outBuf; + const int16_t* p_kSqrtHanning = WebRtcAecm_kSqrtHanning; + const int16_t* pp_kSqrtHanning = &WebRtcAecm_kSqrtHanning[PART_LEN]; + 
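  // [Editor's note, not part of the patch.] The three __asm blocks below
  // implement, in order: (1) scattering efw[] into bit-reversed positions
  // through coefTable_ifft, storing the complex conjugate at the mirrored
  // offset; (2) packing the real parts of the WebRtcSpl_ComplexIFFT() output
  // into consecutive samples; and (3) sqrt-Hanning windowing with Q14
  // rounding, rescaling by (outCFFT - aecm->dfaCleanQDomain), overlap-add
  // with aecm->outBuf, and saturation to 16 bits, while the windowed second
  // half of the block is written back to outBuf for the next call.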
int16_t* output1 = output; + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[i], $zero, 64 \n\t" + "1: \n\t" + "lh %[tmp1], 0(%[pcoefTable_ifft]) \n\t" + "lh %[tmp2], 2(%[pcoefTable_ifft]) \n\t" + "lh %[tmp_re], 0(%[pefw]) \n\t" + "lh %[tmp_im], 2(%[pefw]) \n\t" + "addu %[pfft], %[fft], %[tmp2] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "addu %[pfft], %[fft], %[tmp1] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "subu %[tmp_im], $zero, %[tmp_im] \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "lh %[tmp1], 4(%[pcoefTable_ifft]) \n\t" + "lh %[tmp2], 6(%[pcoefTable_ifft]) \n\t" + "lh %[tmp_re], 4(%[pefw]) \n\t" + "lh %[tmp_im], 6(%[pefw]) \n\t" + "addu %[pfft], %[fft], %[tmp2] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "addu %[pfft], %[fft], %[tmp1] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "subu %[tmp_im], $zero, %[tmp_im] \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "lh %[tmp1], 8(%[pcoefTable_ifft]) \n\t" + "lh %[tmp2], 10(%[pcoefTable_ifft]) \n\t" + "lh %[tmp_re], 8(%[pefw]) \n\t" + "lh %[tmp_im], 10(%[pefw]) \n\t" + "addu %[pfft], %[fft], %[tmp2] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "addu %[pfft], %[fft], %[tmp1] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "subu %[tmp_im], $zero, %[tmp_im] \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "lh %[tmp1], 12(%[pcoefTable_ifft]) \n\t" + "lh %[tmp2], 14(%[pcoefTable_ifft]) \n\t" + "lh %[tmp_re], 12(%[pefw]) \n\t" + "lh %[tmp_im], 14(%[pefw]) \n\t" + "addu %[pfft], %[fft], %[tmp2] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "addu %[pfft], %[fft], %[tmp1] \n\t" + "sh %[tmp_re], 0(%[pfft]) \n\t" + "subu %[tmp_im], $zero, %[tmp_im] \n\t" + "sh %[tmp_im], 2(%[pfft]) \n\t" + "addiu %[pcoefTable_ifft], %[pcoefTable_ifft], 16 \n\t" + "addiu %[i], %[i], -4 \n\t" + "bgtz %[i], 1b \n\t" + " addiu %[pefw], %[pefw], 16 \n\t" + ".set pop \n\t" + : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [pfft] "+r" (pfft), + [i] "=&r" (i), [tmp_re] "=&r" (tmp_re), [tmp_im] "=&r" (tmp_im), + [pefw] "+r" (pefw), [pcoefTable_ifft] "+r" (pcoefTable_ifft), + [fft] "+r" (fft) + : + : "memory" + ); + + fft[2] = efw[PART_LEN].real; + fft[3] = -efw[PART_LEN].imag; + + outCFFT = WebRtcSpl_ComplexIFFT(fft, PART_LEN_SHIFT, 1); + pfft = fft; + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[i], $zero, 128 \n\t" + "1: \n\t" + "lh %[tmp1], 0(%[ppfft]) \n\t" + "lh %[tmp2], 4(%[ppfft]) \n\t" + "lh %[tmp3], 8(%[ppfft]) \n\t" + "lh %[tmp4], 12(%[ppfft]) \n\t" + "addiu %[i], %[i], -4 \n\t" + "sh %[tmp1], 0(%[pfft]) \n\t" + "sh %[tmp2], 2(%[pfft]) \n\t" + "sh %[tmp3], 4(%[pfft]) \n\t" + "sh %[tmp4], 6(%[pfft]) \n\t" + "addiu %[ppfft], %[ppfft], 16 \n\t" + "bgtz %[i], 1b \n\t" + " addiu %[pfft], %[pfft], 8 \n\t" + ".set pop \n\t" + : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [pfft] "+r" (pfft), + [i] "=&r" (i), [tmp3] "=&r" (tmp3), [tmp4] "=&r" (tmp4), + [ppfft] "+r" (ppfft) + : + : "memory" + ); + + pfft = fft; + out_aecm = (int32_t)(outCFFT - aecm->dfaCleanQDomain); + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[i], $zero, 64 \n\t" + "11: \n\t" + "lh %[tmp1], 0(%[pfft]) \n\t" + "lh %[tmp2], 0(%[p_kSqrtHanning]) \n\t" + "addiu %[i], %[i], -2 \n\t" + "mul %[tmp1], %[tmp1], %[tmp2] \n\t" + "lh %[tmp3], 2(%[pfft]) \n\t" + "lh %[tmp4], 2(%[p_kSqrtHanning]) \n\t" + "mul %[tmp3], %[tmp3], %[tmp4] \n\t" + "addiu %[tmp1], %[tmp1], 8192 \n\t" + "sra %[tmp1], %[tmp1], 14 \n\t" + "addiu %[tmp3], %[tmp3], 8192 
\n\t" + "sra %[tmp3], %[tmp3], 14 \n\t" + "bgez %[out_aecm], 1f \n\t" + " negu %[tmp2], %[out_aecm] \n\t" + "srav %[tmp1], %[tmp1], %[tmp2] \n\t" + "b 2f \n\t" + " srav %[tmp3], %[tmp3], %[tmp2] \n\t" + "1: \n\t" + "sllv %[tmp1], %[tmp1], %[out_aecm] \n\t" + "sllv %[tmp3], %[tmp3], %[out_aecm] \n\t" + "2: \n\t" + "lh %[tmp4], 0(%[paecm_buf]) \n\t" + "lh %[tmp2], 2(%[paecm_buf]) \n\t" + "addu %[tmp3], %[tmp3], %[tmp2] \n\t" + "addu %[tmp1], %[tmp1], %[tmp4] \n\t" +#if defined(MIPS_DSP_R1_LE) + "shll_s.w %[tmp1], %[tmp1], 16 \n\t" + "sra %[tmp1], %[tmp1], 16 \n\t" + "shll_s.w %[tmp3], %[tmp3], 16 \n\t" + "sra %[tmp3], %[tmp3], 16 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "sra %[tmp4], %[tmp1], 31 \n\t" + "sra %[tmp2], %[tmp1], 15 \n\t" + "beq %[tmp4], %[tmp2], 3f \n\t" + " ori %[tmp2], $zero, 0x7fff \n\t" + "xor %[tmp1], %[tmp2], %[tmp4] \n\t" + "3: \n\t" + "sra %[tmp2], %[tmp3], 31 \n\t" + "sra %[tmp4], %[tmp3], 15 \n\t" + "beq %[tmp2], %[tmp4], 4f \n\t" + " ori %[tmp4], $zero, 0x7fff \n\t" + "xor %[tmp3], %[tmp4], %[tmp2] \n\t" + "4: \n\t" +#endif // #if defined(MIPS_DSP_R1_LE) + "sh %[tmp1], 0(%[pfft]) \n\t" + "sh %[tmp1], 0(%[output1]) \n\t" + "sh %[tmp3], 2(%[pfft]) \n\t" + "sh %[tmp3], 2(%[output1]) \n\t" + "lh %[tmp1], 128(%[pfft]) \n\t" + "lh %[tmp2], 0(%[pp_kSqrtHanning]) \n\t" + "mul %[tmp1], %[tmp1], %[tmp2] \n\t" + "lh %[tmp3], 130(%[pfft]) \n\t" + "lh %[tmp4], -2(%[pp_kSqrtHanning]) \n\t" + "mul %[tmp3], %[tmp3], %[tmp4] \n\t" + "sra %[tmp1], %[tmp1], 14 \n\t" + "sra %[tmp3], %[tmp3], 14 \n\t" + "bgez %[out_aecm], 5f \n\t" + " negu %[tmp2], %[out_aecm] \n\t" + "srav %[tmp3], %[tmp3], %[tmp2] \n\t" + "b 6f \n\t" + " srav %[tmp1], %[tmp1], %[tmp2] \n\t" + "5: \n\t" + "sllv %[tmp1], %[tmp1], %[out_aecm] \n\t" + "sllv %[tmp3], %[tmp3], %[out_aecm] \n\t" + "6: \n\t" +#if defined(MIPS_DSP_R1_LE) + "shll_s.w %[tmp1], %[tmp1], 16 \n\t" + "sra %[tmp1], %[tmp1], 16 \n\t" + "shll_s.w %[tmp3], %[tmp3], 16 \n\t" + "sra %[tmp3], %[tmp3], 16 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "sra %[tmp4], %[tmp1], 31 \n\t" + "sra %[tmp2], %[tmp1], 15 \n\t" + "beq %[tmp4], %[tmp2], 7f \n\t" + " ori %[tmp2], $zero, 0x7fff \n\t" + "xor %[tmp1], %[tmp2], %[tmp4] \n\t" + "7: \n\t" + "sra %[tmp2], %[tmp3], 31 \n\t" + "sra %[tmp4], %[tmp3], 15 \n\t" + "beq %[tmp2], %[tmp4], 8f \n\t" + " ori %[tmp4], $zero, 0x7fff \n\t" + "xor %[tmp3], %[tmp4], %[tmp2] \n\t" + "8: \n\t" +#endif // #if defined(MIPS_DSP_R1_LE) + "sh %[tmp1], 0(%[paecm_buf]) \n\t" + "sh %[tmp3], 2(%[paecm_buf]) \n\t" + "addiu %[output1], %[output1], 4 \n\t" + "addiu %[paecm_buf], %[paecm_buf], 4 \n\t" + "addiu %[pfft], %[pfft], 4 \n\t" + "addiu %[p_kSqrtHanning], %[p_kSqrtHanning], 4 \n\t" + "bgtz %[i], 11b \n\t" + " addiu %[pp_kSqrtHanning], %[pp_kSqrtHanning], -4 \n\t" + ".set pop \n\t" + : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [pfft] "+r" (pfft), + [output1] "+r" (output1), [tmp3] "=&r" (tmp3), [tmp4] "=&r" (tmp4), + [paecm_buf] "+r" (paecm_buf), [i] "=&r" (i), + [pp_kSqrtHanning] "+r" (pp_kSqrtHanning), + [p_kSqrtHanning] "+r" (p_kSqrtHanning) + : [out_aecm] "r" (out_aecm), + [WebRtcAecm_kSqrtHanning] "r" (WebRtcAecm_kSqrtHanning) + : "hi", "lo","memory" + ); + + // Copy the current block to the old position + // (aecm->outBuf is shifted elsewhere) + memcpy(aecm->xBuf, aecm->xBuf + PART_LEN, sizeof(int16_t) * PART_LEN); + memcpy(aecm->dBufNoisy, + aecm->dBufNoisy + PART_LEN, + sizeof(int16_t) * PART_LEN); + if (nearendClean != NULL) { + memcpy(aecm->dBufClean, + aecm->dBufClean + PART_LEN, + sizeof(int16_t) * PART_LEN); + } +} + 
+void WebRtcAecm_CalcLinearEnergies_mips(AecmCore_t* aecm, + const uint16_t* far_spectrum, + int32_t* echo_est, + uint32_t* far_energy, + uint32_t* echo_energy_adapt, + uint32_t* echo_energy_stored) { + int i; + uint32_t par1 = (*far_energy); + uint32_t par2 = (*echo_energy_adapt); + uint32_t par3 = (*echo_energy_stored); + int16_t* ch_stored_p = &(aecm->channelStored[0]); + int16_t* ch_adapt_p = &(aecm->channelAdapt16[0]); + uint16_t* spectrum_p = (uint16_t*)(&(far_spectrum[0])); + int32_t* echo_p = &(echo_est[0]); + int32_t temp0, stored0, echo0, adept0, spectrum0; + int32_t stored1, adept1, spectrum1, echo1, temp1; + + // Get energy for the delayed far end signal and estimated + // echo using both stored and adapted channels. + for (i = 0; i < PART_LEN; i+= 4) { + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "lh %[stored0], 0(%[ch_stored_p]) \n\t" + "lhu %[adept0], 0(%[ch_adapt_p]) \n\t" + "lhu %[spectrum0], 0(%[spectrum_p]) \n\t" + "lh %[stored1], 2(%[ch_stored_p]) \n\t" + "lhu %[adept1], 2(%[ch_adapt_p]) \n\t" + "lhu %[spectrum1], 2(%[spectrum_p]) \n\t" + "mul %[echo0], %[stored0], %[spectrum0] \n\t" + "mul %[temp0], %[adept0], %[spectrum0] \n\t" + "mul %[echo1], %[stored1], %[spectrum1] \n\t" + "mul %[temp1], %[adept1], %[spectrum1] \n\t" + "addu %[par1], %[par1], %[spectrum0] \n\t" + "addu %[par1], %[par1], %[spectrum1] \n\t" + "addiu %[echo_p], %[echo_p], 16 \n\t" + "addu %[par3], %[par3], %[echo0] \n\t" + "addu %[par2], %[par2], %[temp0] \n\t" + "addu %[par3], %[par3], %[echo1] \n\t" + "addu %[par2], %[par2], %[temp1] \n\t" + "usw %[echo0], -16(%[echo_p]) \n\t" + "usw %[echo1], -12(%[echo_p]) \n\t" + "lh %[stored0], 4(%[ch_stored_p]) \n\t" + "lhu %[adept0], 4(%[ch_adapt_p]) \n\t" + "lhu %[spectrum0], 4(%[spectrum_p]) \n\t" + "lh %[stored1], 6(%[ch_stored_p]) \n\t" + "lhu %[adept1], 6(%[ch_adapt_p]) \n\t" + "lhu %[spectrum1], 6(%[spectrum_p]) \n\t" + "mul %[echo0], %[stored0], %[spectrum0] \n\t" + "mul %[temp0], %[adept0], %[spectrum0] \n\t" + "mul %[echo1], %[stored1], %[spectrum1] \n\t" + "mul %[temp1], %[adept1], %[spectrum1] \n\t" + "addu %[par1], %[par1], %[spectrum0] \n\t" + "addu %[par1], %[par1], %[spectrum1] \n\t" + "addiu %[ch_stored_p], %[ch_stored_p], 8 \n\t" + "addiu %[ch_adapt_p], %[ch_adapt_p], 8 \n\t" + "addiu %[spectrum_p], %[spectrum_p], 8 \n\t" + "addu %[par3], %[par3], %[echo0] \n\t" + "addu %[par2], %[par2], %[temp0] \n\t" + "addu %[par3], %[par3], %[echo1] \n\t" + "addu %[par2], %[par2], %[temp1] \n\t" + "usw %[echo0], -8(%[echo_p]) \n\t" + "usw %[echo1], -4(%[echo_p]) \n\t" + ".set pop \n\t" + : [temp0] "=&r" (temp0), [stored0] "=&r" (stored0), + [adept0] "=&r" (adept0), [spectrum0] "=&r" (spectrum0), + [echo0] "=&r" (echo0), [echo_p] "+r" (echo_p), [par3] "+r" (par3), + [par1] "+r" (par1), [par2] "+r" (par2), [stored1] "=&r" (stored1), + [adept1] "=&r" (adept1), [echo1] "=&r" (echo1), + [spectrum1] "=&r" (spectrum1), [temp1] "=&r" (temp1), + [ch_stored_p] "+r" (ch_stored_p), [ch_adapt_p] "+r" (ch_adapt_p), + [spectrum_p] "+r" (spectrum_p) + : + : "hi", "lo", "memory" + ); + } + + echo_est[PART_LEN] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[PART_LEN], + far_spectrum[PART_LEN]); + par1 += (uint32_t)(far_spectrum[PART_LEN]); + par2 += WEBRTC_SPL_UMUL_16_16(aecm->channelAdapt16[PART_LEN], + far_spectrum[PART_LEN]); + par3 += (uint32_t)echo_est[PART_LEN]; + + (*far_energy) = par1; + (*echo_energy_adapt) = par2; + (*echo_energy_stored) = par3; +} + +#if defined(MIPS_DSP_R1_LE) +void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore_t* aecm, + 
const uint16_t* far_spectrum, + int32_t* echo_est) { + int i; + int16_t* temp1; + uint16_t* temp8; + int32_t temp0, temp2, temp3, temp4, temp5, temp6; + int32_t* temp7 = &(echo_est[0]); + temp1 = &(aecm->channelStored[0]); + temp8 = (uint16_t*)(&far_spectrum[0]); + + // During startup we store the channel every block. + memcpy(aecm->channelStored, aecm->channelAdapt16, + sizeof(int16_t) * PART_LEN1); + // Recalculate echo estimate + for (i = 0; i < PART_LEN; i += 4) { + __asm __volatile ( + "ulw %[temp0], 0(%[temp8]) \n\t" + "ulw %[temp2], 0(%[temp1]) \n\t" + "ulw %[temp4], 4(%[temp8]) \n\t" + "ulw %[temp5], 4(%[temp1]) \n\t" + "muleq_s.w.phl %[temp3], %[temp2], %[temp0] \n\t" + "muleq_s.w.phr %[temp0], %[temp2], %[temp0] \n\t" + "muleq_s.w.phl %[temp6], %[temp5], %[temp4] \n\t" + "muleq_s.w.phr %[temp4], %[temp5], %[temp4] \n\t" + "addiu %[temp7], %[temp7], 16 \n\t" + "addiu %[temp1], %[temp1], 8 \n\t" + "addiu %[temp8], %[temp8], 8 \n\t" + "sra %[temp3], %[temp3], 1 \n\t" + "sra %[temp0], %[temp0], 1 \n\t" + "sra %[temp6], %[temp6], 1 \n\t" + "sra %[temp4], %[temp4], 1 \n\t" + "usw %[temp3], -12(%[temp7]) \n\t" + "usw %[temp0], -16(%[temp7]) \n\t" + "usw %[temp6], -4(%[temp7]) \n\t" + "usw %[temp4], -8(%[temp7]) \n\t" + : [temp0] "=&r" (temp0), [temp2] "=&r" (temp2), [temp3] "=&r" (temp3), + [temp4] "=&r" (temp4), [temp5] "=&r" (temp5), [temp6] "=&r" (temp6), + [temp1] "+r" (temp1), [temp8] "+r" (temp8), [temp7] "+r" (temp7) + : + : "hi", "lo", "memory" + ); + } + echo_est[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], + far_spectrum[i]); +} + +void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore_t* aecm) { + int i; + int32_t* temp3; + int16_t* temp0; + int32_t temp1, temp2, temp4, temp5; + + temp0 = &(aecm->channelStored[0]); + temp3 = &(aecm->channelAdapt32[0]); + + // The stored channel has a significantly lower MSE than the adaptive one for + // two consecutive calculations. Reset the adaptive channel. + memcpy(aecm->channelAdapt16, + aecm->channelStored, + sizeof(int16_t) * PART_LEN1); + + // Restore the W32 channel + for (i = 0; i < PART_LEN; i += 4) { + __asm __volatile ( + "ulw %[temp1], 0(%[temp0]) \n\t" + "ulw %[temp4], 4(%[temp0]) \n\t" + "preceq.w.phl %[temp2], %[temp1] \n\t" + "preceq.w.phr %[temp1], %[temp1] \n\t" + "preceq.w.phl %[temp5], %[temp4] \n\t" + "preceq.w.phr %[temp4], %[temp4] \n\t" + "addiu %[temp0], %[temp0], 8 \n\t" + "usw %[temp2], 4(%[temp3]) \n\t" + "usw %[temp1], 0(%[temp3]) \n\t" + "usw %[temp5], 12(%[temp3]) \n\t" + "usw %[temp4], 8(%[temp3]) \n\t" + "addiu %[temp3], %[temp3], 16 \n\t" + : [temp1] "=&r" (temp1), [temp2] "=&r" (temp2), + [temp4] "=&r" (temp4), [temp5] "=&r" (temp5), + [temp3] "+r" (temp3), [temp0] "+r" (temp0) + : + : "memory" + ); + } + + aecm->channelAdapt32[i] = WEBRTC_SPL_LSHIFT_W32( + (int32_t)aecm->channelStored[i], 16); +} +#endif // #if defined(MIPS_DSP_R1_LE) + +// Transforms a time domain signal into the frequency domain, outputting the +// complex valued signal, absolute value and sum of absolute values. 
+// +// time_signal [in] Pointer to time domain signal +// freq_signal_real [out] Pointer to real part of frequency domain array +// freq_signal_imag [out] Pointer to imaginary part of frequency domain +// array +// freq_signal_abs [out] Pointer to absolute value of frequency domain +// array +// freq_signal_sum_abs [out] Pointer to the sum of all absolute values in +// the frequency domain array +// return value The Q-domain of current frequency values +// +static int TimeToFrequencyDomain(AecmCore_t* aecm, + const int16_t* time_signal, + complex16_t* freq_signal, + uint16_t* freq_signal_abs, + uint32_t* freq_signal_sum_abs) +{ + int i = 0; + int time_signal_scaling = 0; + + // In fft_buf, +16 for 32-byte alignment. + int16_t fft_buf[PART_LEN4 + 16]; + int16_t *fft = (int16_t *) (((uintptr_t) fft_buf + 31) & ~31); + + int16_t tmp16no1; +#if !defined(MIPS_DSP_R2_LE) + int32_t tmp32no1; + int32_t tmp32no2; + int16_t tmp16no2; +#else + int32_t tmp32no10, tmp32no11, tmp32no12, tmp32no13; + int32_t tmp32no20, tmp32no21, tmp32no22, tmp32no23; + int16_t* freqp; + uint16_t* freqabsp; + uint32_t freqt0, freqt1, freqt2, freqt3; + uint32_t freqs; +#endif + +#ifdef AECM_DYNAMIC_Q + tmp16no1 = WebRtcSpl_MaxAbsValueW16(time_signal, PART_LEN2); + time_signal_scaling = WebRtcSpl_NormW16(tmp16no1); +#endif + + WindowAndFFT(aecm, fft, time_signal, freq_signal, time_signal_scaling); + + // Extract imaginary and real part, + // calculate the magnitude for all frequency bins + freq_signal[0].imag = 0; + freq_signal[PART_LEN].imag = 0; + freq_signal[PART_LEN].real = fft[PART_LEN2]; + freq_signal_abs[0] = (uint16_t)WEBRTC_SPL_ABS_W16(freq_signal[0].real); + freq_signal_abs[PART_LEN] = (uint16_t)WEBRTC_SPL_ABS_W16( + freq_signal[PART_LEN].real); + (*freq_signal_sum_abs) = (uint32_t)(freq_signal_abs[0]) + + (uint32_t)(freq_signal_abs[PART_LEN]); + +#if !defined(MIPS_DSP_R2_LE) + for (i = 1; i < PART_LEN; i++) { + if (freq_signal[i].real == 0) + { + freq_signal_abs[i] = (uint16_t)WEBRTC_SPL_ABS_W16( + freq_signal[i].imag); + } + else if (freq_signal[i].imag == 0) + { + freq_signal_abs[i] = (uint16_t)WEBRTC_SPL_ABS_W16( + freq_signal[i].real); + } + else + { + // Approximation for magnitude of complex fft output + // magn = sqrt(real^2 + imag^2) + // magn ~= alpha * max(|imag|,|real|) + beta * min(|imag|,|real|) + // + // The parameters alpha and beta are stored in Q15 + tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real); + tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag); + tmp32no1 = WEBRTC_SPL_MUL_16_16(tmp16no1, tmp16no1); + tmp32no2 = WEBRTC_SPL_MUL_16_16(tmp16no2, tmp16no2); + tmp32no2 = WEBRTC_SPL_ADD_SAT_W32(tmp32no1, tmp32no2); + tmp32no1 = WebRtcSpl_SqrtFloor(tmp32no2); + + freq_signal_abs[i] = (uint16_t)tmp32no1; + } + (*freq_signal_sum_abs) += (uint32_t)freq_signal_abs[i]; + } +#else // #if !defined(MIPS_DSP_R2_LE) + freqs = (uint32_t)(freq_signal_abs[0]) + + (uint32_t)(freq_signal_abs[PART_LEN]); + freqp = &(freq_signal[1].real); + + __asm __volatile ( + "lw %[freqt0], 0(%[freqp]) \n\t" + "lw %[freqt1], 4(%[freqp]) \n\t" + "lw %[freqt2], 8(%[freqp]) \n\t" + "mult $ac0, $zero, $zero \n\t" + "mult $ac1, $zero, $zero \n\t" + "mult $ac2, $zero, $zero \n\t" + "dpaq_s.w.ph $ac0, %[freqt0], %[freqt0] \n\t" + "dpaq_s.w.ph $ac1, %[freqt1], %[freqt1] \n\t" + "dpaq_s.w.ph $ac2, %[freqt2], %[freqt2] \n\t" + "addiu %[freqp], %[freqp], 12 \n\t" + "extr.w %[tmp32no20], $ac0, 1 \n\t" + "extr.w %[tmp32no21], $ac1, 1 \n\t" + "extr.w %[tmp32no22], $ac2, 1 \n\t" + : [freqt0] "=&r" (freqt0), [freqt1] "=&r" (freqt1), + 
[freqt2] "=&r" (freqt2), [freqp] "+r" (freqp), + [tmp32no20] "=r" (tmp32no20), [tmp32no21] "=r" (tmp32no21), + [tmp32no22] "=r" (tmp32no22) + : + : "memory", "hi", "lo", "$ac1hi", "$ac1lo", "$ac2hi", "$ac2lo" + ); + + tmp32no10 = WebRtcSpl_SqrtFloor(tmp32no20); + tmp32no11 = WebRtcSpl_SqrtFloor(tmp32no21); + tmp32no12 = WebRtcSpl_SqrtFloor(tmp32no22); + freq_signal_abs[1] = (uint16_t)tmp32no10; + freq_signal_abs[2] = (uint16_t)tmp32no11; + freq_signal_abs[3] = (uint16_t)tmp32no12; + freqs += (uint32_t)tmp32no10; + freqs += (uint32_t)tmp32no11; + freqs += (uint32_t)tmp32no12; + freqabsp = &(freq_signal_abs[4]); + for (i = 4; i < PART_LEN; i+=4) + { + __asm __volatile ( + "ulw %[freqt0], 0(%[freqp]) \n\t" + "ulw %[freqt1], 4(%[freqp]) \n\t" + "ulw %[freqt2], 8(%[freqp]) \n\t" + "ulw %[freqt3], 12(%[freqp]) \n\t" + "mult $ac0, $zero, $zero \n\t" + "mult $ac1, $zero, $zero \n\t" + "mult $ac2, $zero, $zero \n\t" + "mult $ac3, $zero, $zero \n\t" + "dpaq_s.w.ph $ac0, %[freqt0], %[freqt0] \n\t" + "dpaq_s.w.ph $ac1, %[freqt1], %[freqt1] \n\t" + "dpaq_s.w.ph $ac2, %[freqt2], %[freqt2] \n\t" + "dpaq_s.w.ph $ac3, %[freqt3], %[freqt3] \n\t" + "addiu %[freqp], %[freqp], 16 \n\t" + "addiu %[freqabsp], %[freqabsp], 8 \n\t" + "extr.w %[tmp32no20], $ac0, 1 \n\t" + "extr.w %[tmp32no21], $ac1, 1 \n\t" + "extr.w %[tmp32no22], $ac2, 1 \n\t" + "extr.w %[tmp32no23], $ac3, 1 \n\t" + : [freqt0] "=&r" (freqt0), [freqt1] "=&r" (freqt1), + [freqt2] "=&r" (freqt2), [freqt3] "=&r" (freqt3), + [tmp32no20] "=r" (tmp32no20), [tmp32no21] "=r" (tmp32no21), + [tmp32no22] "=r" (tmp32no22), [tmp32no23] "=r" (tmp32no23), + [freqabsp] "+r" (freqabsp), [freqp] "+r" (freqp) + : + : "memory", "hi", "lo", "$ac1hi", "$ac1lo", + "$ac2hi", "$ac2lo", "$ac3hi", "$ac3lo" + ); + + tmp32no10 = WebRtcSpl_SqrtFloor(tmp32no20); + tmp32no11 = WebRtcSpl_SqrtFloor(tmp32no21); + tmp32no12 = WebRtcSpl_SqrtFloor(tmp32no22); + tmp32no13 = WebRtcSpl_SqrtFloor(tmp32no23); + + __asm __volatile ( + "sh %[tmp32no10], -8(%[freqabsp]) \n\t" + "sh %[tmp32no11], -6(%[freqabsp]) \n\t" + "sh %[tmp32no12], -4(%[freqabsp]) \n\t" + "sh %[tmp32no13], -2(%[freqabsp]) \n\t" + "addu %[freqs], %[freqs], %[tmp32no10] \n\t" + "addu %[freqs], %[freqs], %[tmp32no11] \n\t" + "addu %[freqs], %[freqs], %[tmp32no12] \n\t" + "addu %[freqs], %[freqs], %[tmp32no13] \n\t" + : [freqs] "+r" (freqs) + : [tmp32no10] "r" (tmp32no10), [tmp32no11] "r" (tmp32no11), + [tmp32no12] "r" (tmp32no12), [tmp32no13] "r" (tmp32no13), + [freqabsp] "r" (freqabsp) + : "memory" + ); + } + + (*freq_signal_sum_abs) = freqs; +#endif + + return time_signal_scaling; +} + +int WebRtcAecm_ProcessBlock(AecmCore_t* aecm, + const int16_t* farend, + const int16_t* nearendNoisy, + const int16_t* nearendClean, + int16_t* output) { + int i; + uint32_t xfaSum; + uint32_t dfaNoisySum; + uint32_t dfaCleanSum; + uint32_t echoEst32Gained; + uint32_t tmpU32; + int32_t tmp32no1; + + uint16_t xfa[PART_LEN1]; + uint16_t dfaNoisy[PART_LEN1]; + uint16_t dfaClean[PART_LEN1]; + uint16_t* ptrDfaClean = dfaClean; + const uint16_t* far_spectrum_ptr = NULL; + + // 32 byte aligned buffers (with +8 or +16). + int16_t fft_buf[PART_LEN4 + 2 + 16]; // +2 to make a loop safe. 
+ int32_t echoEst32_buf[PART_LEN1 + 8]; + int32_t dfw_buf[PART_LEN2 + 8]; + int32_t efw_buf[PART_LEN2 + 8]; + + int16_t* fft = (int16_t*)(((uint32_t)fft_buf + 31) & ~ 31); + int32_t* echoEst32 = (int32_t*)(((uint32_t)echoEst32_buf + 31) & ~ 31); + complex16_t* dfw = (complex16_t*)(((uint32_t)dfw_buf + 31) & ~ 31); + complex16_t* efw = (complex16_t*)(((uint32_t)efw_buf + 31) & ~ 31); + + int16_t hnl[PART_LEN1]; + int16_t numPosCoef = 0; + int delay; + int16_t tmp16no1; + int16_t tmp16no2; + int16_t mu; + int16_t supGain; + int16_t zeros32, zeros16; + int16_t zerosDBufNoisy, zerosDBufClean, zerosXBuf; + int far_q; + int16_t resolutionDiff, qDomainDiff; + + const int kMinPrefBand = 4; + const int kMaxPrefBand = 24; + int32_t avgHnl32 = 0; + + int32_t temp1, temp2, temp3, temp4, temp5, temp6, temp7, temp8; + int16_t* ptr; + int16_t* ptr1; + int16_t* er_ptr; + int16_t* dr_ptr; + + ptr = &hnl[0]; + ptr1 = &hnl[0]; + er_ptr = &efw[0].real; + dr_ptr = &dfw[0].real; + + // Determine startup state. There are three states: + // (0) the first CONV_LEN blocks + // (1) another CONV_LEN blocks + // (2) the rest + + if (aecm->startupState < 2) { + aecm->startupState = (aecm->totCount >= CONV_LEN) + + (aecm->totCount >= CONV_LEN2); + } + // END: Determine startup state + + // Buffer near and far end signals + memcpy(aecm->xBuf + PART_LEN, farend, sizeof(int16_t) * PART_LEN); + memcpy(aecm->dBufNoisy + PART_LEN, + nearendNoisy, + sizeof(int16_t) * PART_LEN); + if (nearendClean != NULL) { + memcpy(aecm->dBufClean + PART_LEN, + nearendClean, + sizeof(int16_t) * PART_LEN); + } + + // Transform far end signal from time domain to frequency domain. + far_q = TimeToFrequencyDomain(aecm, + aecm->xBuf, + dfw, + xfa, + &xfaSum); + + // Transform noisy near end signal from time domain to frequency domain. + zerosDBufNoisy = TimeToFrequencyDomain(aecm, + aecm->dBufNoisy, + dfw, + dfaNoisy, + &dfaNoisySum); + aecm->dfaNoisyQDomainOld = aecm->dfaNoisyQDomain; + aecm->dfaNoisyQDomain = (int16_t)zerosDBufNoisy; + + if (nearendClean == NULL) { + ptrDfaClean = dfaNoisy; + aecm->dfaCleanQDomainOld = aecm->dfaNoisyQDomainOld; + aecm->dfaCleanQDomain = aecm->dfaNoisyQDomain; + dfaCleanSum = dfaNoisySum; + } else { + // Transform clean near end signal from time domain to frequency domain. + zerosDBufClean = TimeToFrequencyDomain(aecm, + aecm->dBufClean, + dfw, + dfaClean, + &dfaCleanSum); + aecm->dfaCleanQDomainOld = aecm->dfaCleanQDomain; + aecm->dfaCleanQDomain = (int16_t)zerosDBufClean; + } + + // Get the delay + // Save far-end history and estimate delay + WebRtcAecm_UpdateFarHistory(aecm, xfa, far_q); + + if (WebRtc_AddFarSpectrumFix(aecm->delay_estimator_farend, xfa, PART_LEN1, + far_q) == -1) { + return -1; + } + delay = WebRtc_DelayEstimatorProcessFix(aecm->delay_estimator, + dfaNoisy, + PART_LEN1, + zerosDBufNoisy); + if (delay == -1) { + return -1; + } + else if (delay == -2) { + // If the delay is unknown, we assume zero. + // NOTE: this will have to be adjusted if we ever add lookahead. 
+ delay = 0; + } + + if (aecm->fixedDelay >= 0) { + // Use fixed delay + delay = aecm->fixedDelay; + } + + // Get aligned far end spectrum + far_spectrum_ptr = WebRtcAecm_AlignedFarend(aecm, &far_q, delay); + zerosXBuf = (int16_t) far_q; + + if (far_spectrum_ptr == NULL) { + return -1; + } + + // Calculate log(energy) and update energy threshold levels + WebRtcAecm_CalcEnergies(aecm, + far_spectrum_ptr, + zerosXBuf, + dfaNoisySum, + echoEst32); + // Calculate stepsize + mu = WebRtcAecm_CalcStepSize(aecm); + + // Update counters + aecm->totCount++; + + // This is the channel estimation algorithm. + // It is base on NLMS but has a variable step length, + // which was calculated above. + WebRtcAecm_UpdateChannel(aecm, + far_spectrum_ptr, + zerosXBuf, + dfaNoisy, + mu, + echoEst32); + + supGain = WebRtcAecm_CalcSuppressionGain(aecm); + + // Calculate Wiener filter hnl[] + for (i = 0; i < PART_LEN1; i++) { + // Far end signal through channel estimate in Q8 + // How much can we shift right to preserve resolution + tmp32no1 = echoEst32[i] - aecm->echoFilt[i]; + aecm->echoFilt[i] += WEBRTC_SPL_RSHIFT_W32( + WEBRTC_SPL_MUL_32_16(tmp32no1, 50), 8); + + zeros32 = WebRtcSpl_NormW32(aecm->echoFilt[i]) + 1; + zeros16 = WebRtcSpl_NormW16(supGain) + 1; + if (zeros32 + zeros16 > 16) { + // Multiplication is safe + // Result in + // Q(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN+aecm->xfaQDomainBuf[diff]) + echoEst32Gained = WEBRTC_SPL_UMUL_32_16((uint32_t)aecm->echoFilt[i], + (uint16_t)supGain); + resolutionDiff = 14 - RESOLUTION_CHANNEL16 - RESOLUTION_SUPGAIN; + resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf); + } else { + tmp16no1 = 17 - zeros32 - zeros16; + resolutionDiff = 14 + tmp16no1 - RESOLUTION_CHANNEL16 - + RESOLUTION_SUPGAIN; + resolutionDiff += (aecm->dfaCleanQDomain - zerosXBuf); + if (zeros32 > tmp16no1) { + echoEst32Gained = WEBRTC_SPL_UMUL_32_16( + (uint32_t)aecm->echoFilt[i], + (uint16_t)WEBRTC_SPL_RSHIFT_W16(supGain, tmp16no1)); + } else { + // Result in Q-(RESOLUTION_CHANNEL+RESOLUTION_SUPGAIN-16) + echoEst32Gained = WEBRTC_SPL_UMUL_32_16( + (uint32_t)WEBRTC_SPL_RSHIFT_W32(aecm->echoFilt[i], + tmp16no1), + (uint16_t)supGain); + } + } + + zeros16 = WebRtcSpl_NormW16(aecm->nearFilt[i]); + if ((zeros16 < (aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld)) + & (aecm->nearFilt[i])) { + tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], zeros16); + qDomainDiff = zeros16 - aecm->dfaCleanQDomain + aecm->dfaCleanQDomainOld; + tmp16no2 = WEBRTC_SPL_SHIFT_W16(ptrDfaClean[i], qDomainDiff); + } else { + tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], + aecm->dfaCleanQDomain + - aecm->dfaCleanQDomainOld); + qDomainDiff = 0; + tmp16no2 = ptrDfaClean[i]; + } + + tmp32no1 = (int32_t)(tmp16no2 - tmp16no1); + tmp16no2 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 4); + tmp16no2 += tmp16no1; + zeros16 = WebRtcSpl_NormW16(tmp16no2); + if ((tmp16no2) & (-qDomainDiff > zeros16)) { + aecm->nearFilt[i] = WEBRTC_SPL_WORD16_MAX; + } else { + aecm->nearFilt[i] = WEBRTC_SPL_SHIFT_W16(tmp16no2, -qDomainDiff); + } + + // Wiener filter coefficients, resulting hnl in Q14 + if (echoEst32Gained == 0) { + hnl[i] = ONE_Q14; + numPosCoef++; + } else if (aecm->nearFilt[i] == 0) { + hnl[i] = 0; + } else { + // Multiply the suppression gain + // Rounding + echoEst32Gained += (uint32_t)(aecm->nearFilt[i] >> 1); + tmpU32 = WebRtcSpl_DivU32U16(echoEst32Gained, + (uint16_t)aecm->nearFilt[i]); + + // Current resolution is + // Q-(RESOLUTION_CHANNEL + RESOLUTION_SUPGAIN + // - max(0, 17 - zeros16 - zeros32)) + // Make sure we are in 
Q14 + tmp32no1 = (int32_t)WEBRTC_SPL_SHIFT_W32(tmpU32, resolutionDiff); + if (tmp32no1 > ONE_Q14) { + hnl[i] = 0; + } else if (tmp32no1 < 0) { + hnl[i] = ONE_Q14; + numPosCoef++; + } else { + // 1-echoEst/dfa + hnl[i] = ONE_Q14 - (int16_t)tmp32no1; + if (hnl[i] <= 0) { + hnl[i] = 0; + } else { + numPosCoef++; + } + } + } + } + + // Only in wideband. Prevent the gain in upper band from being larger than + // in lower band. + if (aecm->mult == 2) { + // TODO(bjornv): Investigate if the scaling of hnl[i] below can cause + // speech distortion in double-talk. + for (i = 0; i < (PART_LEN1 >> 3); i++) { + __asm __volatile ( + "lh %[temp1], 0(%[ptr1]) \n\t" + "lh %[temp2], 2(%[ptr1]) \n\t" + "lh %[temp3], 4(%[ptr1]) \n\t" + "lh %[temp4], 6(%[ptr1]) \n\t" + "lh %[temp5], 8(%[ptr1]) \n\t" + "lh %[temp6], 10(%[ptr1]) \n\t" + "lh %[temp7], 12(%[ptr1]) \n\t" + "lh %[temp8], 14(%[ptr1]) \n\t" + "mul %[temp1], %[temp1], %[temp1] \n\t" + "mul %[temp2], %[temp2], %[temp2] \n\t" + "mul %[temp3], %[temp3], %[temp3] \n\t" + "mul %[temp4], %[temp4], %[temp4] \n\t" + "mul %[temp5], %[temp5], %[temp5] \n\t" + "mul %[temp6], %[temp6], %[temp6] \n\t" + "mul %[temp7], %[temp7], %[temp7] \n\t" + "mul %[temp8], %[temp8], %[temp8] \n\t" + "sra %[temp1], %[temp1], 14 \n\t" + "sra %[temp2], %[temp2], 14 \n\t" + "sra %[temp3], %[temp3], 14 \n\t" + "sra %[temp4], %[temp4], 14 \n\t" + "sra %[temp5], %[temp5], 14 \n\t" + "sra %[temp6], %[temp6], 14 \n\t" + "sra %[temp7], %[temp7], 14 \n\t" + "sra %[temp8], %[temp8], 14 \n\t" + "sh %[temp1], 0(%[ptr1]) \n\t" + "sh %[temp2], 2(%[ptr1]) \n\t" + "sh %[temp3], 4(%[ptr1]) \n\t" + "sh %[temp4], 6(%[ptr1]) \n\t" + "sh %[temp5], 8(%[ptr1]) \n\t" + "sh %[temp6], 10(%[ptr1]) \n\t" + "sh %[temp7], 12(%[ptr1]) \n\t" + "sh %[temp8], 14(%[ptr1]) \n\t" + "addiu %[ptr1], %[ptr1], 16 \n\t" + : [temp1] "=&r" (temp1), [temp2] "=&r" (temp2), [temp3] "=&r" (temp3), + [temp4] "=&r" (temp4), [temp5] "=&r" (temp5), [temp6] "=&r" (temp6), + [temp7] "=&r" (temp7), [temp8] "=&r" (temp8), [ptr1] "+r" (ptr1) + : + : "memory", "hi", "lo" + ); + } + for(i = 0; i < (PART_LEN1 & 7); i++) { + __asm __volatile ( + "lh %[temp1], 0(%[ptr1]) \n\t" + "mul %[temp1], %[temp1], %[temp1] \n\t" + "sra %[temp1], %[temp1], 14 \n\t" + "sh %[temp1], 0(%[ptr1]) \n\t" + "addiu %[ptr1], %[ptr1], 2 \n\t" + : [temp1] "=&r" (temp1), [ptr1] "+r" (ptr1) + : + : "memory", "hi", "lo" + ); + } + + for (i = kMinPrefBand; i <= kMaxPrefBand; i++) { + avgHnl32 += (int32_t)hnl[i]; + } + + assert(kMaxPrefBand - kMinPrefBand + 1 > 0); + avgHnl32 /= (kMaxPrefBand - kMinPrefBand + 1); + + for (i = kMaxPrefBand; i < PART_LEN1; i++) { + if (hnl[i] > (int16_t)avgHnl32) { + hnl[i] = (int16_t)avgHnl32; + } + } + } + + // Calculate NLP gain, result is in Q14 + if (aecm->nlpFlag) { + if (numPosCoef < 3) { + for (i = 0; i < PART_LEN1; i++) { + efw[i].real = 0; + efw[i].imag = 0; + hnl[i] = 0; + } + } else { + for (i = 0; i < PART_LEN1; i++) { +#if defined(MIPS_DSP_R1_LE) + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "lh %[temp1], 0(%[ptr]) \n\t" + "lh %[temp2], 0(%[dr_ptr]) \n\t" + "slti %[temp4], %[temp1], 0x4001 \n\t" + "beqz %[temp4], 3f \n\t" + " lh %[temp3], 2(%[dr_ptr]) \n\t" + "slti %[temp5], %[temp1], 3277 \n\t" + "bnez %[temp5], 2f \n\t" + " addiu %[dr_ptr], %[dr_ptr], 4 \n\t" + "mul %[temp2], %[temp2], %[temp1] \n\t" + "mul %[temp3], %[temp3], %[temp1] \n\t" + "shra_r.w %[temp2], %[temp2], 14 \n\t" + "shra_r.w %[temp3], %[temp3], 14 \n\t" + "b 4f \n\t" + " nop \n\t" + "2: \n\t" + "addu %[temp1], $zero, $zero \n\t" + 
"addu %[temp2], $zero, $zero \n\t" + "addu %[temp3], $zero, $zero \n\t" + "b 1f \n\t" + " nop \n\t" + "3: \n\t" + "addiu %[temp1], $0, 0x4000 \n\t" + "1: \n\t" + "sh %[temp1], 0(%[ptr]) \n\t" + "4: \n\t" + "sh %[temp2], 0(%[er_ptr]) \n\t" + "sh %[temp3], 2(%[er_ptr]) \n\t" + "addiu %[ptr], %[ptr], 2 \n\t" + "addiu %[er_ptr], %[er_ptr], 4 \n\t" + ".set pop \n\t" + : [temp1] "=&r" (temp1), [temp2] "=&r" (temp2), [temp3] "=&r" (temp3), + [temp4] "=&r" (temp4), [temp5] "=&r" (temp5), [ptr] "+r" (ptr), + [er_ptr] "+r" (er_ptr), [dr_ptr] "+r" (dr_ptr) + : + : "memory", "hi", "lo" + ); +#else + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "lh %[temp1], 0(%[ptr]) \n\t" + "lh %[temp2], 0(%[dr_ptr]) \n\t" + "slti %[temp4], %[temp1], 0x4001 \n\t" + "beqz %[temp4], 3f \n\t" + " lh %[temp3], 2(%[dr_ptr]) \n\t" + "slti %[temp5], %[temp1], 3277 \n\t" + "bnez %[temp5], 2f \n\t" + " addiu %[dr_ptr], %[dr_ptr], 4 \n\t" + "mul %[temp2], %[temp2], %[temp1] \n\t" + "mul %[temp3], %[temp3], %[temp1] \n\t" + "addiu %[temp2], %[temp2], 0x2000 \n\t" + "addiu %[temp3], %[temp3], 0x2000 \n\t" + "sra %[temp2], %[temp2], 14 \n\t" + "sra %[temp3], %[temp3], 14 \n\t" + "b 4f \n\t" + " nop \n\t" + "2: \n\t" + "addu %[temp1], $zero, $zero \n\t" + "addu %[temp2], $zero, $zero \n\t" + "addu %[temp3], $zero, $zero \n\t" + "b 1f \n\t" + " nop \n\t" + "3: \n\t" + "addiu %[temp1], $0, 0x4000 \n\t" + "1: \n\t" + "sh %[temp1], 0(%[ptr]) \n\t" + "4: \n\t" + "sh %[temp2], 0(%[er_ptr]) \n\t" + "sh %[temp3], 2(%[er_ptr]) \n\t" + "addiu %[ptr], %[ptr], 2 \n\t" + "addiu %[er_ptr], %[er_ptr], 4 \n\t" + ".set pop \n\t" + : [temp1] "=&r" (temp1), [temp2] "=&r" (temp2), [temp3] "=&r" (temp3), + [temp4] "=&r" (temp4), [temp5] "=&r" (temp5), [ptr] "+r" (ptr), + [er_ptr] "+r" (er_ptr), [dr_ptr] "+r" (dr_ptr) + : + : "memory", "hi", "lo" + ); +#endif + } + } + } + else { + // multiply with Wiener coefficients + for (i = 0; i < PART_LEN1; i++) { + efw[i].real = (int16_t) + (WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].real, + hnl[i], + 14)); + efw[i].imag = (int16_t) + (WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(dfw[i].imag, + hnl[i], + 14)); + } + } + + if (aecm->cngMode == AecmTrue) { + ComfortNoise(aecm, ptrDfaClean, efw, hnl); + } + + InverseFFTAndWindow(aecm, fft, efw, output, nearendClean); + + return 0; +} + +// Generate comfort noise and add to output signal. +static void ComfortNoise(AecmCore_t* aecm, + const uint16_t* dfa, + complex16_t* out, + const int16_t* lambda) { + int16_t i; + int16_t tmp16, tmp161, tmp162, tmp163, nrsh1, nrsh2; + int32_t tmp32, tmp321, tnoise, tnoise1; + int32_t tmp322, tmp323, *tmp1; + int16_t* dfap; + int16_t* lambdap; + const int32_t c2049 = 2049; + const int32_t c359 = 359; + const int32_t c114 = ONE_Q14; + + int16_t randW16[PART_LEN]; + int16_t uReal[PART_LEN1]; + int16_t uImag[PART_LEN1]; + int32_t outLShift32; + + int16_t shiftFromNearToNoise = kNoiseEstQDomain - aecm->dfaCleanQDomain; + int16_t minTrackShift = 9; + + assert(shiftFromNearToNoise >= 0); + assert(shiftFromNearToNoise < 16); + + if (aecm->noiseEstCtr < 100) { + // Track the minimum more quickly initially. + aecm->noiseEstCtr++; + minTrackShift = 6; + } + + // Generate a uniform random array on [0 2^15-1]. 
+ WebRtcSpl_RandUArray(randW16, PART_LEN, &aecm->seed); + int16_t* randW16p = (int16_t*)randW16; +#if defined (MIPS_DSP_R1_LE) + int16_t* kCosTablep = (int16_t*)WebRtcAecm_kCosTable; + int16_t* kSinTablep = (int16_t*)WebRtcAecm_kSinTable; +#endif // #if defined(MIPS_DSP_R1_LE) + tmp1 = (int32_t*)aecm->noiseEst + 1; + dfap = (int16_t*)dfa + 1; + lambdap = (int16_t*)lambda + 1; + // Estimate noise power. + for (i = 1; i < PART_LEN1; i+=2) { + // Shift to the noise domain. + __asm __volatile ( + "lh %[tmp32], 0(%[dfap]) \n\t" + "lw %[tnoise], 0(%[tmp1]) \n\t" + "sllv %[outLShift32], %[tmp32], %[shiftFromNearToNoise] \n\t" + : [tmp32] "=&r" (tmp32), [outLShift32] "=r" (outLShift32), + [tnoise] "=&r" (tnoise) + : [tmp1] "r" (tmp1), [dfap] "r" (dfap), + [shiftFromNearToNoise] "r" (shiftFromNearToNoise) + : "memory" + ); + + if (outLShift32 < tnoise) { + // Reset "too low" counter + aecm->noiseEstTooLowCtr[i] = 0; + // Track the minimum. + if (tnoise < (1 << minTrackShift)) { + // For small values, decrease noiseEst[i] every + // |kNoiseEstIncCount| block. The regular approach below can not + // go further down due to truncation. + aecm->noiseEstTooHighCtr[i]++; + if (aecm->noiseEstTooHighCtr[i] >= kNoiseEstIncCount) { + tnoise--; + aecm->noiseEstTooHighCtr[i] = 0; // Reset the counter + } + } else { + __asm __volatile ( + "subu %[tmp32], %[tnoise], %[outLShift32] \n\t" + "srav %[tmp32], %[tmp32], %[minTrackShift] \n\t" + "subu %[tnoise], %[tnoise], %[tmp32] \n\t" + : [tmp32] "=&r" (tmp32), [tnoise] "+r" (tnoise) + : [outLShift32] "r" (outLShift32), [minTrackShift] "r" (minTrackShift) + ); + } + } else { + // Reset "too high" counter + aecm->noiseEstTooHighCtr[i] = 0; + // Ramp slowly upwards until we hit the minimum again. + if ((tnoise >> 19) <= 0) { + if ((tnoise >> 11) > 0) { + // Large enough for relative increase + __asm __volatile ( + "mul %[tnoise], %[tnoise], %[c2049] \n\t" + "sra %[tnoise], %[tnoise], 11 \n\t" + : [tnoise] "+r" (tnoise) + : [c2049] "r" (c2049) + : "hi", "lo" + ); + } else { + // Make incremental increases based on size every + // |kNoiseEstIncCount| block + aecm->noiseEstTooLowCtr[i]++; + if (aecm->noiseEstTooLowCtr[i] >= kNoiseEstIncCount) { + __asm __volatile ( + "sra %[tmp32], %[tnoise], 9 \n\t" + "addi %[tnoise], %[tnoise], 1 \n\t" + "addu %[tnoise], %[tnoise], %[tmp32] \n\t" + : [tnoise] "+r" (tnoise), [tmp32] "=&r" (tmp32) + : + ); + aecm->noiseEstTooLowCtr[i] = 0; // Reset counter + } + } + } else { + // Avoid overflow. + // Multiplication with 2049 will cause wrap around. Scale + // down first and then multiply + __asm __volatile ( + "sra %[tnoise], %[tnoise], 11 \n\t" + "mul %[tnoise], %[tnoise], %[c2049] \n\t" + : [tnoise] "+r" (tnoise) + : [c2049] "r" (c2049) + : "hi", "lo" + ); + } + } + + // Shift to the noise domain. + __asm __volatile ( + "lh %[tmp32], 2(%[dfap]) \n\t" + "lw %[tnoise1], 4(%[tmp1]) \n\t" + "addiu %[dfap], %[dfap], 4 \n\t" + "sllv %[outLShift32], %[tmp32], %[shiftFromNearToNoise] \n\t" + : [tmp32] "=&r" (tmp32), [dfap] "+r" (dfap), + [outLShift32] "=r" (outLShift32), [tnoise1] "=&r" (tnoise1) + : [tmp1] "r" (tmp1), [shiftFromNearToNoise] "r" (shiftFromNearToNoise) + : "memory" + ); + + if (outLShift32 < tnoise1) { + // Reset "too low" counter + aecm->noiseEstTooLowCtr[i + 1] = 0; + // Track the minimum. + if (tnoise1 < (1 << minTrackShift)) { + // For small values, decrease noiseEst[i] every + // |kNoiseEstIncCount| block. The regular approach below can not + // go further down due to truncation. 
+ aecm->noiseEstTooHighCtr[i + 1]++; + if (aecm->noiseEstTooHighCtr[i + 1] >= kNoiseEstIncCount) { + tnoise1--; + aecm->noiseEstTooHighCtr[i + 1] = 0; // Reset the counter + } + } else { + __asm __volatile ( + "subu %[tmp32], %[tnoise1], %[outLShift32] \n\t" + "srav %[tmp32], %[tmp32], %[minTrackShift] \n\t" + "subu %[tnoise1], %[tnoise1], %[tmp32] \n\t" + : [tmp32] "=&r" (tmp32), [tnoise1] "+r" (tnoise1) + : [outLShift32] "r" (outLShift32), [minTrackShift] "r" (minTrackShift) + ); + } + } else { + // Reset "too high" counter + aecm->noiseEstTooHighCtr[i + 1] = 0; + // Ramp slowly upwards until we hit the minimum again. + if ((tnoise1 >> 19) <= 0) { + if ((tnoise1 >> 11) > 0) { + // Large enough for relative increase + __asm __volatile ( + "mul %[tnoise1], %[tnoise1], %[c2049] \n\t" + "sra %[tnoise1], %[tnoise1], 11 \n\t" + : [tnoise1] "+r" (tnoise1) + : [c2049] "r" (c2049) + : "hi", "lo" + ); + } else { + // Make incremental increases based on size every + // |kNoiseEstIncCount| block + aecm->noiseEstTooLowCtr[i + 1]++; + if (aecm->noiseEstTooLowCtr[i + 1] >= kNoiseEstIncCount) { + __asm __volatile ( + "sra %[tmp32], %[tnoise1], 9 \n\t" + "addi %[tnoise1], %[tnoise1], 1 \n\t" + "addu %[tnoise1], %[tnoise1], %[tmp32] \n\t" + : [tnoise1] "+r" (tnoise1), [tmp32] "=&r" (tmp32) + : + ); + aecm->noiseEstTooLowCtr[i + 1] = 0; // Reset counter + } + } + } else { + // Avoid overflow. + // Multiplication with 2049 will cause wrap around. Scale + // down first and then multiply + __asm __volatile ( + "sra %[tnoise1], %[tnoise1], 11 \n\t" + "mul %[tnoise1], %[tnoise1], %[c2049] \n\t" + : [tnoise1] "+r" (tnoise1) + : [c2049] "r" (c2049) + : "hi", "lo" + ); + } + } + + __asm __volatile ( + "lh %[tmp16], 0(%[lambdap]) \n\t" + "lh %[tmp161], 2(%[lambdap]) \n\t" + "sw %[tnoise], 0(%[tmp1]) \n\t" + "sw %[tnoise1], 4(%[tmp1]) \n\t" + "subu %[tmp16], %[c114], %[tmp16] \n\t" + "subu %[tmp161], %[c114], %[tmp161] \n\t" + "srav %[tmp32], %[tnoise], %[shiftFromNearToNoise] \n\t" + "srav %[tmp321], %[tnoise1], %[shiftFromNearToNoise] \n\t" + "addiu %[lambdap], %[lambdap], 4 \n\t" + "addiu %[tmp1], %[tmp1], 8 \n\t" + : [tmp16] "=&r" (tmp16), [tmp161] "=&r" (tmp161), [tmp1] "+r" (tmp1), + [tmp32] "=&r" (tmp32), [tmp321] "=&r" (tmp321), [lambdap] "+r" (lambdap) + : [tnoise] "r" (tnoise), [tnoise1] "r" (tnoise1), [c114] "r" (c114), + [shiftFromNearToNoise] "r" (shiftFromNearToNoise) + : "memory" + ); + + if (tmp32 > 32767) { + tmp32 = 32767; + aecm->noiseEst[i] = WEBRTC_SPL_LSHIFT_W32(tmp32, shiftFromNearToNoise); + } + if (tmp321 > 32767) { + tmp321 = 32767; + aecm->noiseEst[i+1] = WEBRTC_SPL_LSHIFT_W32(tmp321, shiftFromNearToNoise); + } + + __asm __volatile ( + "mul %[tmp32], %[tmp32], %[tmp16] \n\t" + "mul %[tmp321], %[tmp321], %[tmp161] \n\t" + "sra %[nrsh1], %[tmp32], 14 \n\t" + "sra %[nrsh2], %[tmp321], 14 \n\t" + : [nrsh1] "=r" (nrsh1), [nrsh2] "=r" (nrsh2) + : [tmp16] "r" (tmp16), [tmp161] "r" (tmp161), [tmp32] "r" (tmp32), + [tmp321] "r" (tmp321) + : "memory", "hi", "lo" + ); + + __asm __volatile ( + "lh %[tmp32], 0(%[randW16p]) \n\t" + "lh %[tmp321], 2(%[randW16p]) \n\t" + "addiu %[randW16p], %[randW16p], 4 \n\t" + "mul %[tmp32], %[tmp32], %[c359] \n\t" + "mul %[tmp321], %[tmp321], %[c359] \n\t" + "sra %[tmp16], %[tmp32], 15 \n\t" + "sra %[tmp161], %[tmp321], 15 \n\t" + : [randW16p] "+r" (randW16p), [tmp32] "=&r" (tmp32), + [tmp16] "=r" (tmp16), [tmp161] "=r" (tmp161), [tmp321] "=&r" (tmp321) + : [c359] "r" (c359) + : "memory", "hi", "lo" + ); + +#if !defined(MIPS_DSP_R1_LE) + tmp32 = 
WebRtcAecm_kCosTable[tmp16]; + tmp321 = WebRtcAecm_kSinTable[tmp16]; + tmp322 = WebRtcAecm_kCosTable[tmp161]; + tmp323 = WebRtcAecm_kSinTable[tmp161]; +#else + __asm __volatile ( + "sll %[tmp16], %[tmp16], 1 \n\t" + "sll %[tmp161], %[tmp161], 1 \n\t" + "lhx %[tmp32], %[tmp16](%[kCosTablep]) \n\t" + "lhx %[tmp321], %[tmp16](%[kSinTablep]) \n\t" + "lhx %[tmp322], %[tmp161](%[kCosTablep]) \n\t" + "lhx %[tmp323], %[tmp161](%[kSinTablep]) \n\t" + : [tmp32] "=&r" (tmp32), [tmp321] "=&r" (tmp321), + [tmp322] "=&r" (tmp322), [tmp323] "=&r" (tmp323) + : [kCosTablep] "r" (kCosTablep), [tmp16] "r" (tmp16), + [tmp161] "r" (tmp161), [kSinTablep] "r" (kSinTablep) + : "memory" + ); +#endif + __asm __volatile ( + "mul %[tmp32], %[tmp32], %[nrsh1] \n\t" + "negu %[tmp162], %[nrsh1] \n\t" + "mul %[tmp322], %[tmp322], %[nrsh2] \n\t" + "negu %[tmp163], %[nrsh2] \n\t" + "sra %[tmp32], %[tmp32], 13 \n\t" + "mul %[tmp321], %[tmp321], %[tmp162] \n\t" + "sra %[tmp322], %[tmp322], 13 \n\t" + "mul %[tmp323], %[tmp323], %[tmp163] \n\t" + "sra %[tmp321], %[tmp321], 13 \n\t" + "sra %[tmp323], %[tmp323], 13 \n\t" + : [tmp32] "+r" (tmp32), [tmp321] "+r" (tmp321), [tmp162] "=&r" (tmp162), + [tmp322] "+r" (tmp322), [tmp323] "+r" (tmp323), [tmp163] "=&r" (tmp163) + : [nrsh1] "r" (nrsh1), [nrsh2] "r" (nrsh2) + : "hi", "lo" + ); + // Tables are in Q13. + uReal[i] = (int16_t)tmp32; + uImag[i] = (int16_t)tmp321; + uReal[i + 1] = (int16_t)tmp322; + uImag[i + 1] = (int16_t)tmp323; + } + + int32_t tt, sgn; + tt = out[0].real; + sgn = ((int)tt) >> 31; + out[0].real = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn); + tt = out[0].imag; + sgn = ((int)tt) >> 31; + out[0].imag = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn); + for (i = 1; i < PART_LEN; i++) { + tt = out[i].real + uReal[i]; + sgn = ((int)tt) >> 31; + out[i].real = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn); + tt = out[i].imag + uImag[i]; + sgn = ((int)tt) >> 31; + out[i].imag = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn); + } + tt = out[PART_LEN].real + uReal[PART_LEN]; + sgn = ((int)tt) >> 31; + out[PART_LEN].real = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn); + tt = out[PART_LEN].imag; + sgn = ((int)tt) >> 31; + out[PART_LEN].imag = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn); +} + diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi index 357ee495b842..0eff063dd954 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi @@ -7,6 +7,13 @@ # be found in the AUTHORS file in the root of the source tree. { + 'variables': { + 'audio_processing_dependencies': [ + '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + ], + 'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets', + }, 'targets': [ { 'target_name': 'audio_processing', @@ -14,26 +21,15 @@ 'variables': { # Outputs some low-level debug files. 'aec_debug_dump%': 0, + + # Disables the usual mode where we trust the reported system delay + # values the AEC receives. The corresponding define is set appropriately + # in the code, but it can be force-enabled here for testing. 
+ 'aec_untrusted_delay_for_testing%': 0, }, 'dependencies': [ - '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', - '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + '<@(audio_processing_dependencies)', ], - 'include_dirs': [ - '../interface', - 'aec/include', - 'aecm/include', - 'agc/include', - 'include', - 'ns/include', - 'utility', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - 'include', - ], - }, 'sources': [ 'aec/include/echo_cancellation.h', 'aec/echo_cancellation.c', @@ -72,10 +68,10 @@ 'level_estimator_impl.h', 'noise_suppression_impl.cc', 'noise_suppression_impl.h', - 'splitting_filter.cc', - 'splitting_filter.h', 'processing_component.cc', 'processing_component.h', + 'typing_detection.cc', + 'typing_detection.h', 'utility/delay_estimator.c', 'utility/delay_estimator.h', 'utility/delay_estimator_internal.h', @@ -92,6 +88,9 @@ ['aec_debug_dump==1', { 'defines': ['WEBRTC_AEC_DEBUG_DUMP',], }], + ['aec_untrusted_delay_for_testing==1', { + 'defines': ['WEBRTC_UNTRUSTED_DELAY',], + }], ['enable_protobuf==1', { 'dependencies': ['audioproc_debug_proto'], 'defines': ['WEBRTC_AUDIOPROC_DEBUG_DUMP'], @@ -105,6 +104,17 @@ 'ns/nsx_core.h', 'ns/nsx_defines.h', ], + 'conditions': [ + ['target_arch=="mipsel"', { + 'sources': [ + 'ns/nsx_core_mips.c', + ], + }, { + 'sources': [ + 'ns/nsx_core_c.c', + ], + }], + ], }, { 'defines': ['WEBRTC_NS_FLOAT'], 'sources': [ @@ -119,9 +129,18 @@ ['target_arch=="ia32" or target_arch=="x64"', { 'dependencies': ['audio_processing_sse2',], }], - ['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', { + ['(target_arch=="arm" and arm_version==7) or target_arch=="armv7"', { 'dependencies': ['audio_processing_neon',], }], + ['target_arch=="mipsel"', { + 'sources': [ + 'aecm/aecm_core_mips.c', + ], + }, { + 'sources': [ + 'aecm/aecm_core_c.c', + ], + }], ], # TODO(jschuh): Bug 1348: fix size_t to int truncations. 
'msvs_disabled_warnings': [ 4267, ], @@ -162,7 +181,7 @@ }, ], }], - ['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', { + ['(target_arch=="arm" and arm_version==7) or target_arch=="armv7"', { 'targets': [{ 'target_name': 'audio_processing_neon', 'type': 'static_library', @@ -177,7 +196,7 @@ 'conditions': [ ['OS=="android" or OS=="ios"', { 'dependencies': [ - 'audio_processing_offsets', + '<(gen_core_neon_offsets_gyp):*', ], # # We disable the ASM source, because our gyp->Makefile translator @@ -186,6 +205,9 @@ 'aecm/aecm_core_neon.S', 'ns/nsx_core_neon.S', ], + 'include_dirs': [ + '<(shared_generated_dir)', + ], 'sources': [ 'aecm/aecm_core_neon.c', 'ns/nsx_core_neon.c', @@ -194,22 +216,6 @@ }], ], }], - 'conditions': [ - ['OS=="android" or OS=="ios"', { - 'targets': [{ - 'target_name': 'audio_processing_offsets', - 'type': 'none', - 'sources': [ - 'aecm/aecm_core_neon_offsets.c', - 'ns/nsx_core_neon_offsets.c', - ], - 'variables': { - 'asm_header_dir': 'asm_offsets', - }, - 'includes': ['../../build/generate_asm_header.gypi',], - }], - }], - ], }], ], } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.cc index edf20bc2f29f..48297fcd060f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.cc @@ -12,6 +12,7 @@ #include +#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" #include "webrtc/modules/audio_processing/audio_buffer.h" #include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h" #include "webrtc/modules/audio_processing/echo_control_mobile_impl.h" @@ -20,9 +21,9 @@ #include "webrtc/modules/audio_processing/level_estimator_impl.h" #include "webrtc/modules/audio_processing/noise_suppression_impl.h" #include "webrtc/modules/audio_processing/processing_component.h" -#include "webrtc/modules/audio_processing/splitting_filter.h" #include "webrtc/modules/audio_processing/voice_detection_impl.h" #include "webrtc/modules/interface/module_common_types.h" +#include "webrtc/system_wrappers/interface/compile_assert.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/file_wrapper.h" #include "webrtc/system_wrappers/interface/logging.h" @@ -36,9 +37,32 @@ #endif #endif // WEBRTC_AUDIOPROC_DEBUG_DUMP +static const int kChunkSizeMs = 10; + +#define RETURN_ON_ERR(expr) \ + do { \ + int err = expr; \ + if (err != kNoError) { \ + return err; \ + } \ + } while (0) + namespace webrtc { + +// Throughout webrtc, it's assumed that success is represented by zero. 
+COMPILE_ASSERT(AudioProcessing::kNoError == 0, no_error_must_be_zero); + AudioProcessing* AudioProcessing::Create(int id) { - AudioProcessingImpl* apm = new AudioProcessingImpl(id); + return Create(); +} + +AudioProcessing* AudioProcessing::Create() { + Config config; + return Create(config); +} + +AudioProcessing* AudioProcessing::Create(const Config& config) { + AudioProcessingImpl* apm = new AudioProcessingImpl(config); if (apm->Initialize() != kNoError) { delete apm; apm = NULL; @@ -47,12 +71,8 @@ AudioProcessing* AudioProcessing::Create(int id) { return apm; } -int32_t AudioProcessing::TimeUntilNextProcess() { return -1; } -int32_t AudioProcessing::Process() { return -1; } - -AudioProcessingImpl::AudioProcessingImpl(int id) - : id_(id), - echo_cancellation_(NULL), +AudioProcessingImpl::AudioProcessingImpl(const Config& config) + : echo_cancellation_(NULL), echo_control_mobile_(NULL), gain_control_(NULL), high_pass_filter_(NULL), @@ -68,13 +88,15 @@ AudioProcessingImpl::AudioProcessingImpl(int id) #endif sample_rate_hz_(kSampleRate16kHz), split_sample_rate_hz_(kSampleRate16kHz), - samples_per_channel_(sample_rate_hz_ / 100), + samples_per_channel_(kChunkSizeMs * sample_rate_hz_ / 1000), stream_delay_ms_(0), delay_offset_ms_(0), was_stream_delay_set_(false), num_reverse_channels_(1), num_input_channels_(1), - num_output_channels_(1) { + num_output_channels_(1), + output_will_be_muted_(false), + key_pressed_(false) { echo_cancellation_ = EchoCancellationImplWrapper::Create(this); component_list_.push_back(echo_cancellation_); @@ -95,6 +117,8 @@ AudioProcessingImpl::AudioProcessingImpl(int id) voice_detection_ = new VoiceDetectionImpl(this); component_list_.push_back(voice_detection_); + + SetExtraOptions(config); } AudioProcessingImpl::~AudioProcessingImpl() { @@ -157,8 +181,6 @@ int AudioProcessingImpl::InitializeLocked() { capture_audio_ = new AudioBuffer(num_input_channels_, samples_per_channel_); - was_stream_delay_set_ = false; - // Initialize all components. 
std::list<ProcessingComponent*>::iterator it;
for (it = component_list_.begin(); it != component_list_.end(); ++it) { @@ -181,11 +203,16 @@ } void AudioProcessingImpl::SetExtraOptions(const Config& config) { + CriticalSectionScoped crit_scoped(crit_); std::list<ProcessingComponent*>::iterator it; for (it = component_list_.begin(); it != component_list_.end(); ++it) (*it)->SetExtraOptions(config); } +int AudioProcessingImpl::EnableExperimentalNs(bool enable) { + return kNoError; +} + int AudioProcessingImpl::set_sample_rate_hz(int rate) { CriticalSectionScoped crit_scoped(crit_); if (rate == sample_rate_hz_) { @@ -268,6 +295,57 @@ int AudioProcessingImpl::num_output_channels() const { return num_output_channels_; } +void AudioProcessingImpl::set_output_will_be_muted(bool muted) { + output_will_be_muted_ = muted; +} + +bool AudioProcessingImpl::output_will_be_muted() const { + return output_will_be_muted_; +} + +int AudioProcessingImpl::MaybeInitializeLocked(int sample_rate_hz, + int num_input_channels, int num_output_channels, int num_reverse_channels) { + if (sample_rate_hz == sample_rate_hz_ && + num_input_channels == num_input_channels_ && + num_output_channels == num_output_channels_ && + num_reverse_channels == num_reverse_channels_) { + return kNoError; + } + + if (sample_rate_hz != kSampleRate8kHz && + sample_rate_hz != kSampleRate16kHz && + sample_rate_hz != kSampleRate32kHz) { + return kBadSampleRateError; + } + if (num_output_channels > num_input_channels) { + return kBadNumberChannelsError; + } + // Only mono and stereo supported currently. + if (num_input_channels > 2 || num_input_channels < 1 || + num_output_channels > 2 || num_output_channels < 1 || + num_reverse_channels > 2 || num_reverse_channels < 1) { + return kBadNumberChannelsError; + } + if (echo_control_mobile_->is_enabled() && sample_rate_hz > kSampleRate16kHz) { + LOG(LS_ERROR) << "AECM only supports 16 or 8 kHz sample rates"; + return kUnsupportedComponentError; + } + + sample_rate_hz_ = sample_rate_hz; + samples_per_channel_ = kChunkSizeMs * sample_rate_hz / 1000; + num_input_channels_ = num_input_channels; + num_output_channels_ = num_output_channels; + num_reverse_channels_ = num_reverse_channels; + + if (sample_rate_hz_ == kSampleRate32kHz) { + split_sample_rate_hz_ = kSampleRate16kHz; + } else { + split_sample_rate_hz_ = sample_rate_hz_; + } + + return InitializeLocked(); +} + int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { CriticalSectionScoped crit_scoped(crit_); int err = kNoError; @@ -275,15 +353,10 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { if (frame == NULL) { return kNullPointerError; } - - if (frame->sample_rate_hz_ != sample_rate_hz_) { - return kBadSampleRateError; - } - - if (frame->num_channels_ != num_input_channels_) { - return kBadNumberChannelsError; - } - + // TODO(ajm): We now always set the output channels equal to the input + // channels here. Remove the ability to downmix entirely.
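// Aside: the MaybeInitializeLocked() call below is what makes the new
// reconfigure-on-the-fly contract work. APM frames are fixed 10 ms chunks
// (kChunkSizeMs above), so samples_per_channel_ is simply
// kChunkSizeMs * sample_rate_hz / 1000: 80 samples at 8 kHz, 160 at 16 kHz,
// and 320 at 32 kHz, with only the 32 kHz case split into two 16 kHz bands
// (split_sample_rate_hz_). Any change in rate or channel counts re-runs
// InitializeLocked(); matching parameters return early as a cheap no-op.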
+ RETURN_ON_ERR(MaybeInitializeLocked(frame->sample_rate_hz_, + frame->num_channels_, frame->num_channels_, num_reverse_channels_)); if (frame->samples_per_channel_ != samples_per_channel_) { return kBadDataLengthError; } @@ -299,6 +372,7 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { msg->set_delay(stream_delay_ms_); msg->set_drift(echo_cancellation_->stream_drift_samples()); msg->set_level(gain_control_->stream_analog_level()); + msg->set_keypress(key_pressed_); } #endif @@ -314,11 +388,12 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { if (analysis_needed(data_processed)) { for (int i = 0; i < num_output_channels_; i++) { // Split into a low and high band. - SplittingFilterAnalysis(capture_audio_->data(i), - capture_audio_->low_pass_split_data(i), - capture_audio_->high_pass_split_data(i), - capture_audio_->analysis_filter_state1(i), - capture_audio_->analysis_filter_state2(i)); + WebRtcSpl_AnalysisQMF(capture_audio_->data(i), + capture_audio_->samples_per_channel(), + capture_audio_->low_pass_split_data(i), + capture_audio_->high_pass_split_data(i), + capture_audio_->analysis_filter_state1(i), + capture_audio_->analysis_filter_state2(i)); } } @@ -365,11 +440,12 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { if (synthesis_needed(data_processed)) { for (int i = 0; i < num_output_channels_; i++) { // Recombine low and high bands. - SplittingFilterSynthesis(capture_audio_->low_pass_split_data(i), - capture_audio_->high_pass_split_data(i), - capture_audio_->data(i), - capture_audio_->synthesis_filter_state1(i), - capture_audio_->synthesis_filter_state2(i)); + WebRtcSpl_SynthesisQMF(capture_audio_->low_pass_split_data(i), + capture_audio_->high_pass_split_data(i), + capture_audio_->samples_per_split_channel(), + capture_audio_->data(i), + capture_audio_->synthesis_filter_state1(i), + capture_audio_->synthesis_filter_state2(i)); } } @@ -399,25 +475,21 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { return kNoError; } +// TODO(ajm): Have AnalyzeReverseStream accept sample rates not matching the +// primary stream and convert ourselves rather than having the user manage it. +// We can be smarter and use the splitting filter when appropriate. Similarly, +// perform downmixing here. int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) { CriticalSectionScoped crit_scoped(crit_); int err = kNoError; - if (frame == NULL) { return kNullPointerError; } - if (frame->sample_rate_hz_ != sample_rate_hz_) { return kBadSampleRateError; } - - if (frame->num_channels_ != num_reverse_channels_) { - return kBadNumberChannelsError; - } - - if (frame->samples_per_channel_ != samples_per_channel_) { - return kBadDataLengthError; - } + RETURN_ON_ERR(MaybeInitializeLocked(sample_rate_hz_, num_input_channels_, + num_output_channels_, frame->num_channels_)); #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP if (debug_file_->Open()) { @@ -436,15 +508,15 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) { render_audio_->DeinterleaveFrom(frame); - // TODO(ajm): turn the splitting filter into a component? if (sample_rate_hz_ == kSampleRate32kHz) { for (int i = 0; i < num_reverse_channels_; i++) { // Split into low and high band. 
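// Aside: here, as in ProcessStream() above, the module-local SplittingFilter
// helpers give way to the shared QMF pair from common_audio. A minimal sketch
// of the round trip for a 32 kHz, 320-sample chunk (buffer and state names
// are illustrative; the real filter states live in AudioBuffer):
//
//   WebRtcSpl_AnalysisQMF(in, 320, low, high, astate1, astate2);    // 2 x 160
//   /* ... per-band processing on low/high ... */
//   WebRtcSpl_SynthesisQMF(low, high, 160, out, sstate1, sstate2);  // 320
//
// which is exactly the call shape visible in the +/- lines below.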
- SplittingFilterAnalysis(render_audio_->data(i), - render_audio_->low_pass_split_data(i), - render_audio_->high_pass_split_data(i), - render_audio_->analysis_filter_state1(i), - render_audio_->analysis_filter_state2(i)); + WebRtcSpl_AnalysisQMF(render_audio_->data(i), + render_audio_->samples_per_channel(), + render_audio_->low_pass_split_data(i), + render_audio_->high_pass_split_data(i), + render_audio_->analysis_filter_state1(i), + render_audio_->analysis_filter_state2(i)); } } @@ -504,6 +576,14 @@ int AudioProcessingImpl::delay_offset_ms() const { return delay_offset_ms_; } +void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) { + key_pressed_ = key_pressed; +} + +bool AudioProcessingImpl::stream_key_pressed() const { + return key_pressed_; +} + int AudioProcessingImpl::StartDebugRecording( const char filename[AudioProcessing::kMaxFilenameSize]) { CriticalSectionScoped crit_scoped(crit_); @@ -536,6 +616,35 @@ int AudioProcessingImpl::StartDebugRecording( #endif // WEBRTC_AUDIOPROC_DEBUG_DUMP } +int AudioProcessingImpl::StartDebugRecording(FILE* handle) { + CriticalSectionScoped crit_scoped(crit_); + + if (handle == NULL) { + return kNullPointerError; + } + +#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP + // Stop any ongoing recording. + if (debug_file_->Open()) { + if (debug_file_->CloseFile() == -1) { + return kFileError; + } + } + + if (debug_file_->OpenFromFileHandle(handle, true, false) == -1) { + return kFileError; + } + + int err = WriteInitMessage(); + if (err != kNoError) { + return err; + } + return kNoError; +#else + return kUnsupportedFunctionError; +#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP +} + int AudioProcessingImpl::StopDebugRecording() { CriticalSectionScoped crit_scoped(crit_); @@ -580,13 +689,6 @@ VoiceDetection* AudioProcessingImpl::voice_detection() const { return voice_detection_; } -int32_t AudioProcessingImpl::ChangeUniqueId(const int32_t id) { - CriticalSectionScoped crit_scoped(crit_); - id_ = id; - - return kNoError; -} - bool AudioProcessingImpl::is_data_processed() const { int enabled_count = 0; std::list<ProcessingComponent*>::const_iterator it; @@ -638,7 +740,7 @@ int AudioProcessingImpl::WriteMessageToDebugFile() { if (size <= 0) { return kUnspecifiedError; } -#if defined(WEBRTC_BIG_ENDIAN) +#if defined(WEBRTC_ARCH_BIG_ENDIAN) // TODO(ajm): Use little-endian "on the wire". For the moment, we can be // pretty safe in assuming little-endian. #endif diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.h b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.h index b0afd6d33044..09e2192a6434 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl.h @@ -47,7 +47,7 @@ class AudioProcessingImpl : public AudioProcessing { kSampleRate32kHz = 32000 }; - explicit AudioProcessingImpl(int id); + explicit AudioProcessingImpl(const Config& config); virtual ~AudioProcessingImpl(); CriticalSectionWrapper* crit() const; @@ -57,8 +57,11 @@ class AudioProcessingImpl : public AudioProcessing { // AudioProcessing methods.
virtual int Initialize() OVERRIDE; - virtual int InitializeLocked(); virtual void SetExtraOptions(const Config& config) OVERRIDE; + virtual int EnableExperimentalNs(bool enable) OVERRIDE; + virtual bool experimental_ns_enabled() const OVERRIDE { + return false; + } virtual int set_sample_rate_hz(int rate) OVERRIDE; virtual int sample_rate_hz() const OVERRIDE; virtual int set_num_channels(int input_channels, @@ -67,14 +70,19 @@ virtual int num_output_channels() const OVERRIDE; virtual int set_num_reverse_channels(int channels) OVERRIDE; virtual int num_reverse_channels() const OVERRIDE; + virtual void set_output_will_be_muted(bool muted) OVERRIDE; + virtual bool output_will_be_muted() const OVERRIDE; virtual int ProcessStream(AudioFrame* frame) OVERRIDE; virtual int AnalyzeReverseStream(AudioFrame* frame) OVERRIDE; virtual int set_stream_delay_ms(int delay) OVERRIDE; virtual int stream_delay_ms() const OVERRIDE; virtual void set_delay_offset_ms(int offset) OVERRIDE; virtual int delay_offset_ms() const OVERRIDE; + virtual void set_stream_key_pressed(bool key_pressed) OVERRIDE; + virtual bool stream_key_pressed() const OVERRIDE; virtual int StartDebugRecording( const char filename[kMaxFilenameSize]) OVERRIDE; + virtual int StartDebugRecording(FILE* handle) OVERRIDE; virtual int StopDebugRecording() OVERRIDE; virtual EchoCancellation* echo_cancellation() const OVERRIDE; virtual EchoControlMobile* echo_control_mobile() const OVERRIDE; @@ -84,17 +92,17 @@ virtual NoiseSuppression* noise_suppression() const OVERRIDE; virtual VoiceDetection* voice_detection() const OVERRIDE; - // Module methods. - virtual int32_t ChangeUniqueId(const int32_t id) OVERRIDE; + protected: + virtual int InitializeLocked(); private: + int MaybeInitializeLocked(int sample_rate_hz, int num_input_channels, + int num_output_channels, int num_reverse_channels); bool is_data_processed() const; bool interleave_needed(bool is_data_processed) const; bool synthesis_needed(bool is_data_processed) const; bool analysis_needed(bool is_data_processed) const; - int id_; - EchoCancellationImplWrapper* echo_cancellation_; EchoControlMobileImpl* echo_control_mobile_; GainControlImpl* gain_control_; @@ -113,8 +121,8 @@ int WriteMessageToDebugFile(); int WriteInitMessage(); scoped_ptr<FileWrapper> debug_file_; - scoped_ptr<audioproc::Event> event_msg_; // Protobuf message. - std::string event_str_; // Memory for protobuf serialization. + scoped_ptr<audioproc::Event> event_msg_;  // Protobuf message. + std::string event_str_;  // Memory for protobuf serialization. #endif int sample_rate_hz_; @@ -127,6 +135,9 @@ int num_reverse_channels_; int num_input_channels_; int num_output_channels_; + bool output_will_be_muted_; + + bool key_pressed_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc new file mode 100644 index 000000000000..a10fd5e29aca --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree.
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/audio_processing/audio_processing_impl.h" + +#include "testing/gmock/include/gmock/gmock.h" +#include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/modules/audio_processing/test/test_utils.h" +#include "webrtc/modules/interface/module_common_types.h" + +using ::testing::Invoke; +using ::testing::Return; + +namespace webrtc { + +class MockInitialize : public AudioProcessingImpl { + public: + explicit MockInitialize(const Config& config) : AudioProcessingImpl(config) { + } + + MOCK_METHOD0(InitializeLocked, int()); + int RealInitializeLocked() { return AudioProcessingImpl::InitializeLocked(); } +}; + +TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) { + Config config; + MockInitialize mock(config); + ON_CALL(mock, InitializeLocked()) + .WillByDefault(Invoke(&mock, &MockInitialize::RealInitializeLocked)); + + EXPECT_CALL(mock, InitializeLocked()).Times(1); + mock.Initialize(); + + AudioFrame frame; + // Call with the default parameters; there should be no init. + frame.num_channels_ = 1; + SetFrameSampleRate(&frame, 16000); + EXPECT_CALL(mock, InitializeLocked()) + .Times(0); + EXPECT_EQ(kNoErr, mock.ProcessStream(&frame)); + EXPECT_EQ(kNoErr, mock.AnalyzeReverseStream(&frame)); + + // New sample rate. (Only impacts ProcessStream). + SetFrameSampleRate(&frame, 32000); + EXPECT_CALL(mock, InitializeLocked()) + .Times(1); + EXPECT_EQ(kNoErr, mock.ProcessStream(&frame)); + + // New number of channels. + frame.num_channels_ = 2; + EXPECT_CALL(mock, InitializeLocked()) + .Times(2); + EXPECT_EQ(kNoErr, mock.ProcessStream(&frame)); + // ProcessStream sets num_channels_ == num_output_channels. + frame.num_channels_ = 2; + EXPECT_EQ(kNoErr, mock.AnalyzeReverseStream(&frame)); + + // A new sample rate passed to AnalyzeReverseStream should be an error and + // not cause an init. 
+ SetFrameSampleRate(&frame, 16000); + EXPECT_CALL(mock, InitializeLocked()) + .Times(0); + EXPECT_EQ(mock.kBadSampleRateError, mock.AnalyzeReverseStream(&frame)); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/debug.proto b/media/webrtc/trunk/webrtc/modules/audio_processing/debug.proto index 4b3a16389416..fb8e79a27878 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/debug.proto +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/debug.proto @@ -20,6 +20,7 @@ message Stream { optional int32 delay = 3; optional sint32 drift = 4; optional int32 level = 5; + optional bool keypress = 6; } message Event { diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc index 7c36872ef4dd..c93bf6ed10b7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc @@ -72,7 +72,7 @@ EchoCancellationImpl::EchoCancellationImpl(const AudioProcessingImpl* apm) was_stream_drift_set_(false), stream_has_echo_(false), delay_logging_enabled_(false), - delay_correction_enabled_(true) {} + delay_correction_enabled_(true) {} // default to long AEC tail in Mozilla EchoCancellationImpl::~EchoCancellationImpl() {} @@ -336,8 +336,6 @@ int EchoCancellationImpl::Initialize() { return err; } - was_stream_drift_set_ = false; - return apm_->kNoError; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h index 5d98a0b35316..907657ff3141 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h @@ -14,29 +14,6 @@ #include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h" namespace webrtc { -// Use to enable the delay correction feature. This now engages an extended -// filter mode in the AEC, along with robustness measures around the reported -// system delays. It comes with a significant increase in AEC complexity, but is -// much more robust to unreliable reported delays. -// -// Detailed changes to the algorithm: -// - The filter length is changed from 48 to 128 ms. This comes with tuning of -// several parameters: i) filter adaptation stepsize and error threshold; -// ii) non-linear processing smoothing and overdrive. -// - Option to ignore the reported delays on platforms which we deem -// sufficiently unreliable. See WEBRTC_UNTRUSTED_DELAY in echo_cancellation.c. -// - Faster startup times by removing the excessive "startup phase" processing -// of reported delays. -// - Much more conservative adjustments to the far-end read pointer. We smooth -// the delay difference more heavily, and back off from the difference more. -// Adjustments force a readaptation of the filter, so they should be avoided -// except when really necessary. -struct DelayCorrection { - DelayCorrection() : enabled(false) {} - DelayCorrection(bool enabled) : enabled(enabled) {} - - bool enabled; -}; class AudioProcessingImpl; class AudioBuffer; @@ -57,7 +34,7 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper { // ProcessingComponent implementation. 
virtual int Initialize() OVERRIDE; - // virtual void SetExtraOptions(const Config& config) OVERRIDE; + // virtual void SetExtraOptions(const Config& config) OVERRIDE; private: // EchoCancellation implementation. diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc index 16ecf02e4bdf..f9bc3213ff1b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc @@ -12,7 +12,6 @@ extern "C" { #include "webrtc/modules/audio_processing/aec/aec_core.h" } -#include "webrtc/modules/audio_processing/echo_cancellation_impl.h" #include "webrtc/modules/audio_processing/include/audio_processing.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/gain_control_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/gain_control_impl.cc index 35547031e301..a6cd6842560b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/gain_control_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/gain_control_impl.cc @@ -91,6 +91,7 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) { int err = apm_->kNoError; if (mode_ == kAdaptiveAnalog) { + capture_levels_.assign(num_handles(), analog_capture_level_); for (int i = 0; i < num_handles(); i++) { Handle* my_handle = static_cast<Handle*>(handle(i)); err = WebRtcAgc_AddMic( @@ -114,7 +115,6 @@ audio->low_pass_split_data(i), audio->high_pass_split_data(i), static_cast<int16_t>(audio->samples_per_split_channel()), - //capture_levels_[i], analog_capture_level_, &capture_level_out); @@ -190,13 +190,6 @@ int GainControlImpl::set_stream_analog_level(int level) { if (level < minimum_capture_level_ || level > maximum_capture_level_) { return apm_->kBadParameterError; } - - if (mode_ == kAdaptiveAnalog) { - if (level != analog_capture_level_) { - // The analog level has been changed; update our internal levels. - capture_levels_.assign(num_handles(), level); - } - } analog_capture_level_ = level; return apm_->kNoError; @@ -309,11 +302,7 @@ int GainControlImpl::Initialize() { return err; } - analog_capture_level_ = - (maximum_capture_level_ - minimum_capture_level_) >> 1; capture_levels_.assign(num_handles(), analog_capture_level_); - was_analog_level_set_ = false; - return apm_->kNoError; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp b/media/webrtc/trunk/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp new file mode 100644 index 000000000000..55c79689f7ea --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp @@ -0,0 +1,45 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree.
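# Aside: a sketch of how this generation flow fits together, assuming the
# usual asm-offsets pattern. lib_core_neon_offsets (see
# lib_core_neon_offsets.gypi) compiles small C files whose globals are
# initialized with struct offsets, roughly:
#
#   int offset_nsx_anaLen = offsetof(NsxInst_t, anaLen);  /* illustrative */
#
# unpack_lib_posix then pulls the .o files back out of the archive, and
# libvpx's obj_int_extract reads the integer initializers and re-emits them
# in 'cheader' format, yielding the #define headers that the NEON assembly
# sources include from <(shared_generated_dir).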
+
+{
+ 'includes': ['lib_core_neon_offsets.gypi'],
+ 'targets' : [
+ {
+ 'target_name': 'gen_nsx_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'sources': ['<(shared_generated_dir)/nsx_core_neon_offsets.o',],
+ 'variables' : {
+ 'unpack_lib_name':'nsx_core_neon_offsets.o',
+ },
+ 'includes': [
+ '../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ {
+ 'target_name': 'gen_aecm_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'variables': {
+ 'unpack_lib_name':'aecm_core_neon_offsets.o',
+ },
+ 'sources': ['<(shared_generated_dir)/aecm_core_neon_offsets.o',],
+ 'includes': [
+ '../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ ],
+} diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp b/media/webrtc/trunk/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp new file mode 100644 index 000000000000..f4a9134fb211 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp @@ -0,0 +1,45 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. +
+{
+ 'includes': ['lib_core_neon_offsets.gypi'],
+ 'targets' : [
+ {
+ 'target_name': 'gen_nsx_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'sources': ['<(shared_generated_dir)/nsx_core_neon_offsets.o',],
+ 'variables' : {
+ 'unpack_lib_name':'nsx_core_neon_offsets.o',
+ },
+ 'includes': [
+ '../../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ {
+ 'target_name': 'gen_aecm_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'variables': {
+ 'unpack_lib_name':'aecm_core_neon_offsets.o',
+ },
+ 'sources': ['<(shared_generated_dir)/aecm_core_neon_offsets.o',],
+ 'includes': [
+ '../../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ ],
+} diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/include/audio_processing.h b/media/webrtc/trunk/webrtc/modules/audio_processing/include/audio_processing.h index b01cbb32fb48..c34baacb875a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/include/audio_processing.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/include/audio_processing.h @@ -12,9 +12,9 @@ #define WEBRTC_MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_H_ #include <stddef.h> // size_t +#include <stdio.h> // FILE #include "webrtc/common.h" -#include "webrtc/modules/interface/module.h" #include "webrtc/typedefs.h" struct AecCore; @@ -30,6 +30,37 @@ class LevelEstimator; class NoiseSuppression; class VoiceDetection; +// Use to enable the delay correction feature.
This now engages an extended +// filter mode in the AEC, along with robustness measures around the reported +// system delays. It comes with a significant increase in AEC complexity, but is +// much more robust to unreliable reported delays. +// +// Detailed changes to the algorithm: +// - The filter length is changed from 48 to 128 ms. This comes with tuning of +// several parameters: i) filter adaptation stepsize and error threshold; +// ii) non-linear processing smoothing and overdrive. +// - Option to ignore the reported delays on platforms which we deem +// sufficiently unreliable. See WEBRTC_UNTRUSTED_DELAY in echo_cancellation.c. +// - Faster startup times by removing the excessive "startup phase" processing +// of reported delays. +// - Much more conservative adjustments to the far-end read pointer. We smooth +// the delay difference more heavily, and back off from the difference more. +// Adjustments force a readaptation of the filter, so they should be avoided +// except when really necessary. +struct DelayCorrection { + DelayCorrection() : enabled(false) {} + explicit DelayCorrection(bool enabled) : enabled(enabled) {} + bool enabled; +}; + +// Must be provided through AudioProcessing::Create(Config&). It will have no +// impact if used with AudioProcessing::SetExtraOptions(). +struct ExperimentalAgc { + ExperimentalAgc() : enabled(true) {} + explicit ExperimentalAgc(bool enabled) : enabled(enabled) {} + bool enabled; +}; + // The Audio Processing Module (APM) provides a collection of voice processing // components designed for real-time communications software. // @@ -64,11 +95,6 @@ class VoiceDetection; // // Usage example, omitting error checking: // AudioProcessing* apm = AudioProcessing::Create(0); -// apm->set_sample_rate_hz(32000); // Super-wideband processing. -// -// // Mono capture and stereo render. -// apm->set_num_channels(1, 1); -// apm->set_num_reverse_channels(2); // // apm->high_pass_filter()->Enable(true); // @@ -107,35 +133,44 @@ class VoiceDetection; // // Close the application... // delete apm; // -class AudioProcessing : public Module { +class AudioProcessing { public: - // Creates a APM instance, with identifier |id|. Use one instance for every - // primary audio stream requiring processing. On the client-side, this would - // typically be one instance for the near-end stream, and additional instances - // for each far-end stream which requires processing. On the server-side, - // this would typically be one instance for every incoming stream. + // Creates an APM instance. Use one instance for every primary audio stream + // requiring processing. On the client-side, this would typically be one + // instance for the near-end stream, and additional instances for each far-end + // stream which requires processing. On the server-side, this would typically + // be one instance for every incoming stream. + static AudioProcessing* Create(); + // Allows passing in an optional configuration at create-time. + static AudioProcessing* Create(const Config& config); + // TODO(ajm): Deprecated; remove all calls to it. static AudioProcessing* Create(int id); virtual ~AudioProcessing() {} // Initializes internal states, while retaining all user settings. This // should be called before beginning to process a new audio stream. However, // it is not necessary to call before processing the first stream after - // creation.
- // - // set_sample_rate_hz(), set_num_channels() and set_num_reverse_channels() - // will trigger a full initialization if the settings are changed from their - // existing values. Otherwise they are no-ops. + // creation. It is also not necessary to call if the audio parameters (sample + // rate and number of channels) have changed. Passing updated parameters + // directly to |ProcessStream()| and |AnalyzeReverseStream()| is permissible. virtual int Initialize() = 0; // Pass down additional options which don't have explicit setters. This // ensures the options are applied immediately. virtual void SetExtraOptions(const Config& config) = 0; + virtual int EnableExperimentalNs(bool enable) = 0; + virtual bool experimental_ns_enabled() const = 0; + + // DEPRECATED: It is now possible to modify the sample rate directly in a call + // to |ProcessStream|. // Sets the sample |rate| in Hz for both the primary and reverse audio // streams. 8000, 16000 or 32000 Hz are permitted. virtual int set_sample_rate_hz(int rate) = 0; virtual int sample_rate_hz() const = 0; + // DEPRECATED: It is now possible to modify the number of channels directly in + // a call to |ProcessStream|. // Sets the number of channels for the primary audio stream. Input frames must // contain a number of channels given by |input_channels|, while output frames // will be returned with number of channels given by |output_channels|. @@ -143,11 +178,20 @@ class AudioProcessing : public Module { virtual int num_input_channels() const = 0; virtual int num_output_channels() const = 0; + // DEPRECATED: It is now possible to modify the number of channels directly in + // a call to |AnalyzeReverseStream|. // Sets the number of channels for the reverse audio stream. Input frames must // contain a number of channels given by |channels|. virtual int set_num_reverse_channels(int channels) = 0; virtual int num_reverse_channels() const = 0; + // Set to true when the output of AudioProcessing will be muted or in some + // other way not used. Ideally, the captured audio would still be processed, + // but some components may change behavior based on this information. + // Default false. + virtual void set_output_will_be_muted(bool muted) = 0; + virtual bool output_will_be_muted() const = 0; + // Processes a 10 ms |frame| of the primary audio stream. On the client-side, // this is the near-end (or captured) audio. // @@ -156,8 +200,8 @@ class AudioProcessing : public Module { // with the stream_ tag which is needed should be called after processing. // // The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_| - // members of |frame| must be valid, and correspond to settings supplied - // to APM. + // members of |frame| must be valid. If changed from the previous call to this + // method, it will trigger an initialization. virtual int ProcessStream(AudioFrame* frame) = 0; // Analyzes a 10 ms |frame| of the reverse direction audio stream. The frame @@ -171,7 +215,8 @@ class AudioProcessing : public Module { // chances are you don't need to use it. // // The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_| - // members of |frame| must be valid. + // members of |frame| must be valid. |sample_rate_hz_| must correspond to + // |sample_rate_hz()| // // TODO(ajm): add const to input; requires an implementation fix. 
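  // A minimal usage sketch for the create-time configuration added above
  // (not text from this header; error handling omitted, and Config::Set<T>()
  // takes ownership of the pointer):
  //
  //   webrtc::Config config;
  //   config.Set<webrtc::DelayCorrection>(new webrtc::DelayCorrection(true));
  //   webrtc::AudioProcessing* apm = webrtc::AudioProcessing::Create(config);
  //   // ... every 10 ms: apm->AnalyzeReverseStream(&far_frame);
  //   //                  apm->ProcessStream(&near_frame);
  //   delete apm;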
virtual int AnalyzeReverseStream(AudioFrame* frame) = 0; @@ -192,6 +237,11 @@ class AudioProcessing : public Module { virtual int set_stream_delay_ms(int delay) = 0; virtual int stream_delay_ms() const = 0; + // Call to signal that a key press occurred (true) or did not occur (false) + // with this chunk of audio. + virtual void set_stream_key_pressed(bool key_pressed) = 0; + virtual bool stream_key_pressed() const = 0; + // Sets a delay |offset| in ms to add to the values passed in through // set_stream_delay_ms(). May be positive or negative. // @@ -207,6 +257,10 @@ class AudioProcessing : public Module { static const size_t kMaxFilenameSize = 1024; virtual int StartDebugRecording(const char filename[kMaxFilenameSize]) = 0; + // Same as above but uses an existing file handle. Takes ownership + // of |handle| and closes it at StopDebugRecording(). + virtual int StartDebugRecording(FILE* handle) = 0; + // Stops recording debugging information, and closes the file. Recording // cannot be resumed in the same file (without overwriting it). virtual int StopDebugRecording() = 0; @@ -250,10 +304,6 @@ class AudioProcessing : public Module { // will continue, but the parameter may have been truncated. kBadStreamParameterWarning = -13 }; - - // Inherited from Module. - virtual int32_t TimeUntilNextProcess() OVERRIDE; - virtual int32_t Process() OVERRIDE; }; // The acoustic echo cancellation (AEC) component provides better performance diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/include/mock_audio_processing.h b/media/webrtc/trunk/webrtc/modules/audio_processing/include/mock_audio_processing.h index 9a36fe84e8cc..0383448c77ed 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/include/mock_audio_processing.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/include/mock_audio_processing.h @@ -183,6 +183,10 @@ class MockAudioProcessing : public AudioProcessing { int()); MOCK_METHOD1(SetExtraOptions, void(const Config& config)); + MOCK_METHOD1(EnableExperimentalNs, + int(bool enable)); + MOCK_CONST_METHOD0(experimental_ns_enabled, + bool()); MOCK_METHOD1(set_sample_rate_hz, int(int rate)); MOCK_CONST_METHOD0(sample_rate_hz, @@ -197,6 +201,10 @@ class MockAudioProcessing : public AudioProcessing { int(int channels)); MOCK_CONST_METHOD0(num_reverse_channels, int()); + MOCK_METHOD1(set_output_will_be_muted, + void(bool muted)); + MOCK_CONST_METHOD0(output_will_be_muted, + bool()); MOCK_METHOD1(ProcessStream, int(AudioFrame* frame)); MOCK_METHOD1(AnalyzeReverseStream, @@ -205,12 +213,18 @@ class MockAudioProcessing : public AudioProcessing { int(int delay)); MOCK_CONST_METHOD0(stream_delay_ms, int()); + MOCK_METHOD1(set_stream_key_pressed, + void(bool key_pressed)); + MOCK_CONST_METHOD0(stream_key_pressed, + bool()); MOCK_METHOD1(set_delay_offset_ms, void(int offset)); MOCK_CONST_METHOD0(delay_offset_ms, int()); MOCK_METHOD1(StartDebugRecording, int(const char filename[kMaxFilenameSize])); + MOCK_METHOD1(StartDebugRecording, + int(FILE* handle)); MOCK_METHOD0(StopDebugRecording, int()); virtual MockEchoCancellation* echo_cancellation() const { diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi b/media/webrtc/trunk/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi new file mode 100644 index 000000000000..42ec9acfbf95 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi @@ -0,0 +1,51 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
+# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +# This file has common information for gen_core_neon_offsets.gyp +# and gen_core_neon_offsets_chromium.gyp +{ + 'variables': { + 'variables' : { + 'lib_intermediate_name': '', + 'conditions' : [ + ['android_webview_build==1', { + 'lib_intermediate_name' : '<(android_src)/$(call intermediates-dir-for, STATIC_LIBRARIES, lib_core_neon_offsets)/lib_core_neon_offsets.a', + }], + ], + }, + 'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets', + 'output_dir': '<(shared_generated_dir)', + 'output_format': 'cheader', + 'unpack_lib_search_path_list': [ + '-a', '<(PRODUCT_DIR)/lib_core_neon_offsets.a', + '-a', '<(LIB_DIR)/webrtc/modules/audio_processing/lib_core_neon_offsets.a', + '-a', '<(LIB_DIR)/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.a', + '-a', '<(lib_intermediate_name)', + ], + 'unpack_lib_output_dir':'<(shared_generated_dir)', + }, + 'includes': [ + '../../build/common.gypi', + ], + 'conditions': [ + ['((target_arch=="arm" and arm_version==7) or target_arch=="armv7") and (OS=="android" or OS=="ios")', { + 'targets' : [ + { + 'target_name': 'lib_core_neon_offsets', + 'type': 'static_library', + 'android_unmangled_name': 1, + 'hard_dependency': 1, + 'sources': [ + 'ns/nsx_core_neon_offsets.c', + 'aecm/aecm_core_neon_offsets.c', + ], + }, + ], + }], + ], +} diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.c b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.c index 44cd68558b8f..e627a2eff2d4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.c @@ -70,142 +70,6 @@ static const int16_t WebRtcNsx_kLogTableFrac[256] = { // Skip first frequency bins during estimation. 
(0 <= value < 64) static const int kStartBand = 5; -static const int16_t kPowTableFrac[1024] = { - 0, 1, 1, 2, 3, 3, 4, 5, - 6, 6, 7, 8, 8, 9, 10, 10, - 11, 12, 13, 13, 14, 15, 15, 16, - 17, 17, 18, 19, 20, 20, 21, 22, - 22, 23, 24, 25, 25, 26, 27, 27, - 28, 29, 30, 30, 31, 32, 32, 33, - 34, 35, 35, 36, 37, 37, 38, 39, - 40, 40, 41, 42, 42, 43, 44, 45, - 45, 46, 47, 48, 48, 49, 50, 50, - 51, 52, 53, 53, 54, 55, 56, 56, - 57, 58, 58, 59, 60, 61, 61, 62, - 63, 64, 64, 65, 66, 67, 67, 68, - 69, 69, 70, 71, 72, 72, 73, 74, - 75, 75, 76, 77, 78, 78, 79, 80, - 81, 81, 82, 83, 84, 84, 85, 86, - 87, 87, 88, 89, 90, 90, 91, 92, - 93, 93, 94, 95, 96, 96, 97, 98, - 99, 100, 100, 101, 102, 103, 103, 104, - 105, 106, 106, 107, 108, 109, 109, 110, - 111, 112, 113, 113, 114, 115, 116, 116, - 117, 118, 119, 119, 120, 121, 122, 123, - 123, 124, 125, 126, 126, 127, 128, 129, - 130, 130, 131, 132, 133, 133, 134, 135, - 136, 137, 137, 138, 139, 140, 141, 141, - 142, 143, 144, 144, 145, 146, 147, 148, - 148, 149, 150, 151, 152, 152, 153, 154, - 155, 156, 156, 157, 158, 159, 160, 160, - 161, 162, 163, 164, 164, 165, 166, 167, - 168, 168, 169, 170, 171, 172, 173, 173, - 174, 175, 176, 177, 177, 178, 179, 180, - 181, 181, 182, 183, 184, 185, 186, 186, - 187, 188, 189, 190, 190, 191, 192, 193, - 194, 195, 195, 196, 197, 198, 199, 200, - 200, 201, 202, 203, 204, 205, 205, 206, - 207, 208, 209, 210, 210, 211, 212, 213, - 214, 215, 215, 216, 217, 218, 219, 220, - 220, 221, 222, 223, 224, 225, 225, 226, - 227, 228, 229, 230, 231, 231, 232, 233, - 234, 235, 236, 237, 237, 238, 239, 240, - 241, 242, 243, 243, 244, 245, 246, 247, - 248, 249, 249, 250, 251, 252, 253, 254, - 255, 255, 256, 257, 258, 259, 260, 261, - 262, 262, 263, 264, 265, 266, 267, 268, - 268, 269, 270, 271, 272, 273, 274, 275, - 276, 276, 277, 278, 279, 280, 281, 282, - 283, 283, 284, 285, 286, 287, 288, 289, - 290, 291, 291, 292, 293, 294, 295, 296, - 297, 298, 299, 299, 300, 301, 302, 303, - 304, 305, 306, 307, 308, 308, 309, 310, - 311, 312, 313, 314, 315, 316, 317, 318, - 318, 319, 320, 321, 322, 323, 324, 325, - 326, 327, 328, 328, 329, 330, 331, 332, - 333, 334, 335, 336, 337, 338, 339, 339, - 340, 341, 342, 343, 344, 345, 346, 347, - 348, 349, 350, 351, 352, 352, 353, 354, - 355, 356, 357, 358, 359, 360, 361, 362, - 363, 364, 365, 366, 367, 367, 368, 369, - 370, 371, 372, 373, 374, 375, 376, 377, - 378, 379, 380, 381, 382, 383, 384, 385, - 385, 386, 387, 388, 389, 390, 391, 392, - 393, 394, 395, 396, 397, 398, 399, 400, - 401, 402, 403, 404, 405, 406, 407, 408, - 409, 410, 410, 411, 412, 413, 414, 415, - 416, 417, 418, 419, 420, 421, 422, 423, - 424, 425, 426, 427, 428, 429, 430, 431, - 432, 433, 434, 435, 436, 437, 438, 439, - 440, 441, 442, 443, 444, 445, 446, 447, - 448, 449, 450, 451, 452, 453, 454, 455, - 456, 457, 458, 459, 460, 461, 462, 463, - 464, 465, 466, 467, 468, 469, 470, 471, - 472, 473, 474, 475, 476, 477, 478, 479, - 480, 481, 482, 483, 484, 485, 486, 487, - 488, 489, 490, 491, 492, 493, 494, 495, - 496, 498, 499, 500, 501, 502, 503, 504, - 505, 506, 507, 508, 509, 510, 511, 512, - 513, 514, 515, 516, 517, 518, 519, 520, - 521, 522, 523, 525, 526, 527, 528, 529, - 530, 531, 532, 533, 534, 535, 536, 537, - 538, 539, 540, 541, 542, 544, 545, 546, - 547, 548, 549, 550, 551, 552, 553, 554, - 555, 556, 557, 558, 560, 561, 562, 563, - 564, 565, 566, 567, 568, 569, 570, 571, - 572, 574, 575, 576, 577, 578, 579, 580, - 581, 582, 583, 584, 585, 587, 588, 589, - 590, 591, 592, 593, 594, 595, 596, 597, - 599, 600, 601, 602, 603, 604, 605, 
606, - 607, 608, 610, 611, 612, 613, 614, 615, - 616, 617, 618, 620, 621, 622, 623, 624, - 625, 626, 627, 628, 630, 631, 632, 633, - 634, 635, 636, 637, 639, 640, 641, 642, - 643, 644, 645, 646, 648, 649, 650, 651, - 652, 653, 654, 656, 657, 658, 659, 660, - 661, 662, 664, 665, 666, 667, 668, 669, - 670, 672, 673, 674, 675, 676, 677, 678, - 680, 681, 682, 683, 684, 685, 687, 688, - 689, 690, 691, 692, 693, 695, 696, 697, - 698, 699, 700, 702, 703, 704, 705, 706, - 708, 709, 710, 711, 712, 713, 715, 716, - 717, 718, 719, 720, 722, 723, 724, 725, - 726, 728, 729, 730, 731, 732, 733, 735, - 736, 737, 738, 739, 741, 742, 743, 744, - 745, 747, 748, 749, 750, 751, 753, 754, - 755, 756, 757, 759, 760, 761, 762, 763, - 765, 766, 767, 768, 770, 771, 772, 773, - 774, 776, 777, 778, 779, 780, 782, 783, - 784, 785, 787, 788, 789, 790, 792, 793, - 794, 795, 796, 798, 799, 800, 801, 803, - 804, 805, 806, 808, 809, 810, 811, 813, - 814, 815, 816, 818, 819, 820, 821, 823, - 824, 825, 826, 828, 829, 830, 831, 833, - 834, 835, 836, 838, 839, 840, 841, 843, - 844, 845, 846, 848, 849, 850, 851, 853, - 854, 855, 857, 858, 859, 860, 862, 863, - 864, 866, 867, 868, 869, 871, 872, 873, - 874, 876, 877, 878, 880, 881, 882, 883, - 885, 886, 887, 889, 890, 891, 893, 894, - 895, 896, 898, 899, 900, 902, 903, 904, - 906, 907, 908, 909, 911, 912, 913, 915, - 916, 917, 919, 920, 921, 923, 924, 925, - 927, 928, 929, 931, 932, 933, 935, 936, - 937, 938, 940, 941, 942, 944, 945, 946, - 948, 949, 950, 952, 953, 955, 956, 957, - 959, 960, 961, 963, 964, 965, 967, 968, - 969, 971, 972, 973, 975, 976, 977, 979, - 980, 981, 983, 984, 986, 987, 988, 990, - 991, 992, 994, 995, 996, 998, 999, 1001, - 1002, 1003, 1005, 1006, 1007, 1009, 1010, 1012, - 1013, 1014, 1016, 1017, 1018, 1020, 1021, 1023 -}; - -static const int16_t kIndicatorTable[17] = { - 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718, - 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187 -}; - // hybrib Hanning & flat window static const int16_t kBlocks80w128x[128] = { 0, 536, 1072, 1606, 2139, 2669, 3196, 3720, 4240, 4756, 5266, @@ -612,7 +476,7 @@ static void PrepareSpectrumC(NsxInst_t* inst, int16_t* freq_buf) { } // Denormalize the real-valued signal |in|, the output from inverse FFT. -static __inline void Denormalize(NsxInst_t* inst, int16_t* in, int factor) { +static void DenormalizeC(NsxInst_t* inst, int16_t* in, int factor) { int i = 0; int32_t tmp32 = 0; for (i = 0; i < inst->anaLen; i += 1) { @@ -677,9 +541,9 @@ static void AnalysisUpdateC(NsxInst_t* inst, } // Normalize the real-valued signal |in|, the input to forward FFT. -static __inline void NormalizeRealBuffer(NsxInst_t* inst, - const int16_t* in, - int16_t* out) { +static void NormalizeRealBufferC(NsxInst_t* inst, + const int16_t* in, + int16_t* out) { int i = 0; for (i = 0; i < inst->anaLen; ++i) { out[i] = WEBRTC_SPL_LSHIFT_W16(in[i], inst->normData); // Q(normData) @@ -691,6 +555,8 @@ NoiseEstimation WebRtcNsx_NoiseEstimation; PrepareSpectrum WebRtcNsx_PrepareSpectrum; SynthesisUpdate WebRtcNsx_SynthesisUpdate; AnalysisUpdate WebRtcNsx_AnalysisUpdate; +Denormalize WebRtcNsx_Denormalize; +NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer; #if (defined WEBRTC_DETECT_ARM_NEON || defined WEBRTC_ARCH_ARM_NEON) // Initialize function pointers for ARM Neon platform. @@ -702,6 +568,19 @@ static void WebRtcNsx_InitNeon(void) { } #endif +#if defined(MIPS32_LE) +// Initialize function pointers for MIPS platform. 
+static void WebRtcNsx_InitMips(void) { + WebRtcNsx_PrepareSpectrum = WebRtcNsx_PrepareSpectrum_mips; + WebRtcNsx_SynthesisUpdate = WebRtcNsx_SynthesisUpdate_mips; + WebRtcNsx_AnalysisUpdate = WebRtcNsx_AnalysisUpdate_mips; + WebRtcNsx_NormalizeRealBuffer = WebRtcNsx_NormalizeRealBuffer_mips; +#if defined(MIPS_DSP_R1_LE) + WebRtcNsx_Denormalize = WebRtcNsx_Denormalize_mips; +#endif +} +#endif + void WebRtcNsx_CalcParametricNoiseEstimate(NsxInst_t* inst, int16_t pink_noise_exp_avg, int32_t pink_noise_num_avg, @@ -889,6 +768,8 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) { WebRtcNsx_PrepareSpectrum = PrepareSpectrumC; WebRtcNsx_SynthesisUpdate = SynthesisUpdateC; WebRtcNsx_AnalysisUpdate = AnalysisUpdateC; + WebRtcNsx_Denormalize = DenormalizeC; + WebRtcNsx_NormalizeRealBuffer = NormalizeRealBufferC; #ifdef WEBRTC_DETECT_ARM_NEON uint64_t features = WebRtc_GetCPUFeaturesARM(); @@ -899,6 +780,10 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) { WebRtcNsx_InitNeon(); #endif +#if defined(MIPS32_LE) + WebRtcNsx_InitMips(); +#endif + inst->initFlag = 1; return 0; @@ -1300,239 +1185,6 @@ void WebRtcNsx_ComputeSpectralDifference(NsxInst_t* inst, uint16_t* magnIn) { } } -// Compute speech/noise probability -// speech/noise probability is returned in: probSpeechFinal -//snrLocPrior is the prior SNR for each frequency (in Q11) -//snrLocPost is the post SNR for each frequency (in Q11) -void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, uint16_t* nonSpeechProbFinal, - uint32_t* priorLocSnr, uint32_t* postLocSnr) { - uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3; - - int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32; - int32_t frac32, logTmp; - int32_t logLrtTimeAvgKsumFX; - - int16_t indPriorFX16; - int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart; - - int i, normTmp, normTmp2, nShifts; - - // compute feature based on average LR factor - // this is the average over all frequencies of the smooth log LRT - logLrtTimeAvgKsumFX = 0; - for (i = 0; i < inst->magnLen; i++) { - besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11 - normTmp = WebRtcSpl_NormU32(postLocSnr[i]); - num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp) - if (normTmp > 10) { - den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp) - } else { - den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp) - } - if (den > 0) { - besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11 - } else { - besselTmpFX32 -= num; // Q11 - } - - // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior) - inst->logLrtTimeAvg[i]); - // Here, LRT_TAVG = 0.5 - zeros = WebRtcSpl_NormU32(priorLocSnr[i]); - frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19); - tmp32 = WEBRTC_SPL_MUL(frac32, frac32); - tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19); - tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12); - frac32 = tmp32 + 37; - // tmp32 = log2(priorLocSnr[i]) - tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12 - logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8); // log2(priorLocSnr[i])*log(2) - tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1); // Q12 - inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12 - - logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12 - } - inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, inst->stages + 10); // 5 = BIN_SIZE_LRT / 2 - // done with computation of LR factor - - 
// - //compute the indicator functions - // - - // average LRT feature - // FLOAT code - // indicator0 = 0.5 * (tanh(widthPrior * (logLrtTimeAvgKsum - threshPrior0)) + 1.0); - tmpIndFX = 16384; // Q14(1.0) - tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12 - nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5; - //use larger width in tanh map for pause regions - if (tmp32no1 < 0) { - tmpIndFX = 0; - tmp32no1 = -tmp32no1; - //widthPrior = widthPrior * 2.0; - nShifts++; - } - tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14 - // compute indicator function: sigmoid map - tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14); - if ((tableIndex < 16) && (tableIndex >= 0)) { - tmp16no2 = kIndicatorTable[tableIndex]; - tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; - frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14 - tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); - if (tmpIndFX == 0) { - tmpIndFX = 8192 - tmp16no2; // Q14 - } else { - tmpIndFX = 8192 + tmp16no2; // Q14 - } - } - indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14 - - //spectral flatness feature - if (inst->weightSpecFlat) { - tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10 - tmpIndFX = 16384; // Q14(1.0) - //use larger width in tanh map for pause regions - tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10 - nShifts = 4; - if (inst->thresholdSpecFlat < tmpU32no1) { - tmpIndFX = 0; - tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat; - //widthPrior = widthPrior * 2.0; - nShifts++; - } - tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, - nShifts), 25); //Q14 - tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), 25); //Q14 - // compute indicator function: sigmoid map - // FLOAT code - // indicator1 = 0.5 * (tanh(sgnMap * widthPrior * (threshPrior1 - tmpFloat1)) + 1.0); - tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); - if (tableIndex < 16) { - tmp16no2 = kIndicatorTable[tableIndex]; - tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; - frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 - tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); - if (tmpIndFX) { - tmpIndFX = 8192 + tmp16no2; // Q14 - } else { - tmpIndFX = 8192 - tmp16no2; // Q14 - } - } - indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14 - } - - //for template spectral-difference - if (inst->weightSpecDiff) { - tmpU32no1 = 0; - if (inst->featureSpecDiff) { - normTmp = WEBRTC_SPL_MIN(20 - inst->stages, - WebRtcSpl_NormU32(inst->featureSpecDiff)); - tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); // Q(normTmp-2*stages) - tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, 20 - inst->stages - - normTmp); - if (tmpU32no2 > 0) { - // Q(20 - inst->stages) - tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); - } else { - tmpU32no1 = (uint32_t)(0x7fffffff); - } - } - tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, 17), 25); - tmpU32no2 = tmpU32no1 - tmpU32no3; - nShifts = 1; - tmpIndFX = 16384; // Q14(1.0) - //use larger width in tanh map for pause regions - if (tmpU32no2 & 0x80000000) { - tmpIndFX = 0; - tmpU32no2 = tmpU32no3 - tmpU32no1; - //widthPrior = widthPrior * 2.0; - nShifts--; - } - tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts); - // compute indicator function: sigmoid map - /* FLOAT code - indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 
1.0); - */ - tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); - if (tableIndex < 16) { - tmp16no2 = kIndicatorTable[tableIndex]; - tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; - frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 - tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - tmp16no1, frac, 14); - if (tmpIndFX) { - tmpIndFX = 8192 + tmp16no2; - } else { - tmpIndFX = 8192 - tmp16no2; - } - } - indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14 - } - - //combine the indicator function with the feature weights - // FLOAT code - // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * indicator1 + weightIndPrior2 * indicator2); - indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14 - // done with computing indicator function - - //compute the prior probability - // FLOAT code - // inst->priorNonSpeechProb += PRIOR_UPDATE * (indPriorNonSpeech - inst->priorNonSpeechProb); - tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14 - inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( - PRIOR_UPDATE_Q14, tmp16, 14); // Q14 - - //final speech probability: combine prior model with LR factor: - - memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen); - - if (inst->priorNonSpeechProb > 0) { - for (i = 0; i < inst->magnLen; i++) { - // FLOAT code - // invLrt = exp(inst->logLrtTimeAvg[i]); - // invLrt = inst->priorSpeechProb * invLrt; - // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) / (1.0 - inst->priorSpeechProb + invLrt); - // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt; - // nonSpeechProbFinal[i] = inst->priorNonSpeechProb / (inst->priorNonSpeechProb + invLrt); - if (inst->logLrtTimeAvgW32[i] < 65300) { - tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(inst->logLrtTimeAvgW32[i], 23637), - 14); // Q12 - intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12); - if (intPart < -8) { - intPart = -8; - } - frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12 - - // Quadratic approximation of 2^frac - tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12 - tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12 - invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart) - + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8 - - normTmp = WebRtcSpl_NormW32(invLrtFX); - normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb)); - if (normTmp + normTmp2 >= 7) { - if (normTmp + normTmp2 < 15) { - invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp); - // Q(normTmp+normTmp2-7) - tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); - // Q(normTmp+normTmp2+7) - invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2); // Q14 - } else { - tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); // Q22 - invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14 - } - - tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb, 8); // Q22 - - nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1, - (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8 - } - } - } - } -} - // Transform input (speechFrame) to frequency domain magnitude (magnU16) void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU16) { @@ -1592,7 +1244,7 @@ void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU right_shifts_in_magnU16 = WEBRTC_SPL_MAX(right_shifts_in_magnU16, 0); // create realImag as winData interleaved with zeros (= imag. 
part), normalize it - NormalizeRealBuffer(inst, winData, realImag); + WebRtcNsx_NormalizeRealBuffer(inst, winData, realImag); // FFT output will be in winData[]. WebRtcSpl_RealForwardFFT(inst->real_fft, realImag, winData); @@ -1824,7 +1476,7 @@ void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) { // Inverse FFT output will be in rfft_out[]. outCIFFT = WebRtcSpl_RealInverseFFT(inst->real_fft, realImag, rfft_out); - Denormalize(inst, rfft_out, outCIFFT); + WebRtcNsx_Denormalize(inst, rfft_out, outCIFFT); //scale factor: only do it after END_STARTUP_LONG time gainFactor = 8192; // 8192 = Q13(1.0) diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.h b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.h index 1ad369ffbeb9..5b3c5e78f4eb 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core.h @@ -201,6 +201,23 @@ typedef void (*AnalysisUpdate)(NsxInst_t* inst, int16_t* new_speech); extern AnalysisUpdate WebRtcNsx_AnalysisUpdate; +// Denormalize the real-valued signal |in|, the output from inverse FFT. +typedef void (*Denormalize) (NsxInst_t* inst, int16_t* in, int factor); +extern Denormalize WebRtcNsx_Denormalize; + +// Normalize the real-valued signal |in|, the input to forward FFT. +typedef void (*NormalizeRealBuffer) (NsxInst_t* inst, + const int16_t* in, + int16_t* out); +extern NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer; + +// Compute speech/noise probability. +// Intended to be private. +void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, + uint16_t* nonSpeechProbFinal, + uint32_t* priorLocSnr, + uint32_t* postLocSnr); + #if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON) // For the above function pointers, functions for generic platforms are declared // and defined as static in file nsx_core.c, while those for ARM Neon platforms @@ -218,6 +235,26 @@ void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst, void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buff); #endif +#if defined(MIPS32_LE) +// For the above function pointers, functions for generic platforms are declared +// and defined as static in file nsx_core.c, while those for MIPS platforms +// are declared below and defined in file nsx_core_mips.c. +void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst, + int16_t* out_frame, + int16_t gain_factor); +void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst, + int16_t* out, + int16_t* new_speech); +void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buff); +void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst, + const int16_t* in, + int16_t* out); +#if defined(MIPS_DSP_R1_LE) +void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor); +#endif + +#endif + #ifdef __cplusplus } #endif diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core_c.c b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core_c.c new file mode 100644 index 000000000000..452b96e77b04 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core_c.c @@ -0,0 +1,273 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h" +#include "webrtc/modules/audio_processing/ns/nsx_core.h" + +static const int16_t kIndicatorTable[17] = { + 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718, + 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187 +}; + +// Compute speech/noise probability +// speech/noise probability is returned in: probSpeechFinal +//snrLocPrior is the prior SNR for each frequency (in Q11) +//snrLocPost is the post SNR for each frequency (in Q11) +void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, + uint16_t* nonSpeechProbFinal, + uint32_t* priorLocSnr, + uint32_t* postLocSnr) { + + uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3; + int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32; + int32_t frac32, logTmp; + int32_t logLrtTimeAvgKsumFX; + int16_t indPriorFX16; + int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart; + int i, normTmp, normTmp2, nShifts; + + // compute feature based on average LR factor + // this is the average over all frequencies of the smooth log LRT + logLrtTimeAvgKsumFX = 0; + for (i = 0; i < inst->magnLen; i++) { + besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11 + normTmp = WebRtcSpl_NormU32(postLocSnr[i]); + num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp) + if (normTmp > 10) { + den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp) + } else { + den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp) + } + if (den > 0) { + besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11 + } else { + besselTmpFX32 -= num; // Q11 + } + + // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior) + // - inst->logLrtTimeAvg[i]); + // Here, LRT_TAVG = 0.5 + zeros = WebRtcSpl_NormU32(priorLocSnr[i]); + frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19); + tmp32 = WEBRTC_SPL_MUL(frac32, frac32); + tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19); + tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12); + frac32 = tmp32 + 37; + // tmp32 = log2(priorLocSnr[i]) + tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12 + logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8); + // log2(priorLocSnr[i])*log(2) + tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1); + // Q12 + inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12 + + logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12 + } + inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, + inst->stages + 10); + // 5 = BIN_SIZE_LRT / 2 + // done with computation of LR factor + + // + //compute the indicator functions + // + + // average LRT feature + // FLOAT code + // indicator0 = 0.5 * (tanh(widthPrior * + // (logLrtTimeAvgKsum - threshPrior0)) + 1.0); + tmpIndFX = 16384; // Q14(1.0) + tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12 + nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5; + //use larger width in tanh map for pause regions + if (tmp32no1 < 0) { + tmpIndFX = 0; + tmp32no1 = -tmp32no1; + //widthPrior = widthPrior * 2.0; + nShifts++; + } + tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14 + // compute indicator function: sigmoid map + tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14); + if ((tableIndex < 16) && (tableIndex >= 0)) { + tmp16no2 = kIndicatorTable[tableIndex]; + tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; + frac = (int16_t)(tmp32no1 & 0x00003fff); // 
Q14 + tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); + if (tmpIndFX == 0) { + tmpIndFX = 8192 - tmp16no2; // Q14 + } else { + tmpIndFX = 8192 + tmp16no2; // Q14 + } + } + indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14 + + //spectral flatness feature + if (inst->weightSpecFlat) { + tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10 + tmpIndFX = 16384; // Q14(1.0) + //use larger width in tanh map for pause regions + tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10 + nShifts = 4; + if (inst->thresholdSpecFlat < tmpU32no1) { + tmpIndFX = 0; + tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat; + //widthPrior = widthPrior * 2.0; + nShifts++; + } + tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, + nShifts), 25); + //Q14 + tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), + 25); //Q14 + // compute indicator function: sigmoid map + // FLOAT code + // indicator1 = 0.5 * (tanh(sgnMap * widthPrior * + // (threshPrior1 - tmpFloat1)) + 1.0); + tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); + if (tableIndex < 16) { + tmp16no2 = kIndicatorTable[tableIndex]; + tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; + frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 + tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); + if (tmpIndFX) { + tmpIndFX = 8192 + tmp16no2; // Q14 + } else { + tmpIndFX = 8192 - tmp16no2; // Q14 + } + } + indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14 + } + + //for template spectral-difference + if (inst->weightSpecDiff) { + tmpU32no1 = 0; + if (inst->featureSpecDiff) { + normTmp = WEBRTC_SPL_MIN(20 - inst->stages, + WebRtcSpl_NormU32(inst->featureSpecDiff)); + tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); + // Q(normTmp-2*stages) + tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, + 20 - inst->stages - normTmp); + if (tmpU32no2 > 0) { + // Q(20 - inst->stages) + tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); + } else { + tmpU32no1 = (uint32_t)(0x7fffffff); + } + } + tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, + 17), + 25); + tmpU32no2 = tmpU32no1 - tmpU32no3; + nShifts = 1; + tmpIndFX = 16384; // Q14(1.0) + //use larger width in tanh map for pause regions + if (tmpU32no2 & 0x80000000) { + tmpIndFX = 0; + tmpU32no2 = tmpU32no3 - tmpU32no1; + //widthPrior = widthPrior * 2.0; + nShifts--; + } + tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts); + // compute indicator function: sigmoid map + /* FLOAT code + indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0); + */ + tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); + if (tableIndex < 16) { + tmp16no2 = kIndicatorTable[tableIndex]; + tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; + frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 + tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( + tmp16no1, frac, 14); + if (tmpIndFX) { + tmpIndFX = 8192 + tmp16no2; + } else { + tmpIndFX = 8192 - tmp16no2; + } + } + indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14 + } + + //combine the indicator function with the feature weights + // FLOAT code + // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * + // indicator1 + weightIndPrior2 * indicator2); + indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14 + // done with computing indicator function + + //compute the prior 
probability + // FLOAT code + // inst->priorNonSpeechProb += PRIOR_UPDATE * + // (indPriorNonSpeech - inst->priorNonSpeechProb); + tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14 + inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( + PRIOR_UPDATE_Q14, tmp16, 14); // Q14 + + //final speech probability: combine prior model with LR factor: + + memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen); + + if (inst->priorNonSpeechProb > 0) { + for (i = 0; i < inst->magnLen; i++) { + // FLOAT code + // invLrt = exp(inst->logLrtTimeAvg[i]); + // invLrt = inst->priorSpeechProb * invLrt; + // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) / + // (1.0 - inst->priorSpeechProb + invLrt); + // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt; + // nonSpeechProbFinal[i] = inst->priorNonSpeechProb / + // (inst->priorNonSpeechProb + invLrt); + if (inst->logLrtTimeAvgW32[i] < 65300) { + tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL( + inst->logLrtTimeAvgW32[i], 23637), + 14); // Q12 + intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12); + if (intPart < -8) { + intPart = -8; + } + frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12 + + // Quadratic approximation of 2^frac + tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12 + tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12 + invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart) + + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8 + + normTmp = WebRtcSpl_NormW32(invLrtFX); + normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb)); + if (normTmp + normTmp2 >= 7) { + if (normTmp + normTmp2 < 15) { + invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp); + // Q(normTmp+normTmp2-7) + tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, + (16384 - inst->priorNonSpeechProb)); + // Q(normTmp+normTmp2+7) + invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2); + // Q14 + } else { + tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, + (16384 - inst->priorNonSpeechProb)); + // Q22 + invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14 + } + + tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb, + 8); // Q22 + + nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1, + (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8 + } + } + } + } +} + diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core_mips.c b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core_mips.c new file mode 100644 index 000000000000..ccb0c3763246 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/ns/nsx_core_mips.c @@ -0,0 +1,1008 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
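
The nsx_core_c.c function above leans on two fixed-point tricks that recur for every feature: a 17-entry tanh-shaped table indexed by the integer part of a Q14 value and linearly interpolated with the low 14 bits, and a quadratic approximation of 2^x for turning the averaged log-LRT back into a likelihood ratio. A standalone sketch of both follows; SigmoidIndicatorQ14 and Exp2Q8 are illustrative names for this sketch, not WebRTC APIs.

#include <stdint.h>

// Q14 sigmoid-style lookup used by all three indicator features above:
// the integer part of the Q14 input selects a table entry and the low
// 14 bits interpolate linearly toward the next one.
static const int16_t kIndicatorTableSketch[17] = {
  0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
  7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
};

static int16_t SigmoidIndicatorQ14(int32_t x_q14) {
  int16_t index = (int16_t)(x_q14 >> 14);    // integer part of x
  if (index < 0 || index >= 16)
    return kIndicatorTableSketch[16];        // treat as saturated (the code
                                             // above keeps its 0/16384 preset)
  int16_t frac = (int16_t)(x_q14 & 0x3fff);  // fractional part, Q14
  int16_t base = kIndicatorTableSketch[index];
  int16_t step = (int16_t)(kIndicatorTableSketch[index + 1] - base);
  return (int16_t)(base + (((int32_t)step * frac) >> 14));
}

// The 2^x step behind invLrtFX above: the Q12 natural log is first scaled
// by 23637/16384 (~1/ln 2) to get a Q12 log2, then the integer part is
// split off and 2^frac is approximated as 1 + 0.65625*frac + 0.34375*frac^2
// (the 84/128 and 44/128 coefficients in the code above). Result is Q8.
static int32_t Exp2Q8(int32_t log2_q12) {
  int16_t int_part = (int16_t)(log2_q12 >> 12);   // floor of the exponent
  if (int_part < -8) int_part = -8;               // clamp vanishing values
  int32_t frac = log2_q12 & 0x0fff;               // Q12 fractional part
  int32_t poly = (frac * frac * 44) >> 19;        // 0.34375*frac^2, Q12
  poly += (frac * 84) >> 7;                       // + 0.65625*frac, Q12
  int32_t result = (int32_t)1 << (8 + int_part);  // 2^int_part in Q8
  result += (int_part >= 4) ? (poly << (int_part - 4))
                            : (poly >> (4 - int_part));
  return result;
}
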
+ */ + +#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h" +#include "webrtc/modules/audio_processing/ns/nsx_core.h" + +static const int16_t kIndicatorTable[17] = { + 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718, + 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187 +}; + +// Compute speech/noise probability +// speech/noise probability is returned in: probSpeechFinal +//snrLocPrior is the prior SNR for each frequency (in Q11) +//snrLocPost is the post SNR for each frequency (in Q11) +void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, + uint16_t* nonSpeechProbFinal, + uint32_t* priorLocSnr, + uint32_t* postLocSnr) { + + uint32_t tmpU32no1, tmpU32no2, tmpU32no3; + int32_t indPriorFX, tmp32no1; + int32_t logLrtTimeAvgKsumFX; + int16_t indPriorFX16; + int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac; + int i, normTmp, nShifts; + + int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9; + int32_t const_max = 0x7fffffff; + int32_t const_neg43 = -43; + int32_t const_5412 = 5412; + int32_t const_11rsh12 = (11 << 12); + int32_t const_178 = 178; + + + // compute feature based on average LR factor + // this is the average over all frequencies of the smooth log LRT + logLrtTimeAvgKsumFX = 0; + for (i = 0; i < inst->magnLen; i++) { + r0 = postLocSnr[i]; // Q11 + r1 = priorLocSnr[i]; + r2 = inst->logLrtTimeAvgW32[i]; + + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "clz %[r3], %[r0] \n\t" + "clz %[r5], %[r1] \n\t" + "slti %[r4], %[r3], 32 \n\t" + "slti %[r6], %[r5], 32 \n\t" + "movz %[r3], $0, %[r4] \n\t" + "movz %[r5], $0, %[r6] \n\t" + "slti %[r4], %[r3], 11 \n\t" + "addiu %[r6], %[r3], -11 \n\t" + "neg %[r7], %[r6] \n\t" + "sllv %[r6], %[r1], %[r6] \n\t" + "srav %[r7], %[r1], %[r7] \n\t" + "movn %[r6], %[r7], %[r4] \n\t" + "sllv %[r1], %[r1], %[r5] \n\t" + "and %[r1], %[r1], %[const_max] \n\t" + "sra %[r1], %[r1], 19 \n\t" + "mul %[r7], %[r1], %[r1] \n\t" + "sllv %[r3], %[r0], %[r3] \n\t" + "divu %[r8], %[r3], %[r6] \n\t" + "slti %[r6], %[r6], 1 \n\t" + "mul %[r7], %[r7], %[const_neg43] \n\t" + "sra %[r7], %[r7], 19 \n\t" + "movz %[r3], %[r8], %[r6] \n\t" + "subu %[r0], %[r0], %[r3] \n\t" + "mul %[r1], %[r1], %[const_5412] \n\t" + "sra %[r1], %[r1], 12 \n\t" + "addu %[r7], %[r7], %[r1] \n\t" + "addiu %[r1], %[r7], 37 \n\t" + "addiu %[r5], %[r5], -31 \n\t" + "neg %[r5], %[r5] \n\t" + "sll %[r5], %[r5], 12 \n\t" + "addu %[r5], %[r5], %[r1] \n\t" + "subu %[r7], %[r5], %[const_11rsh12] \n\t" + "mul %[r7], %[r7], %[const_178] \n\t" + "sra %[r7], %[r7], 8 \n\t" + "addu %[r7], %[r7], %[r2] \n\t" + "sra %[r7], %[r7], 1 \n\t" + "subu %[r2], %[r2], %[r7] \n\t" + "addu %[r2], %[r2], %[r0] \n\t" + ".set pop \n\t" + : [r0] "+r" (r0), [r1] "+r" (r1), [r2] "+r" (r2), + [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5), + [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8) + : [const_max] "r" (const_max), [const_neg43] "r" (const_neg43), + [const_5412] "r" (const_5412), [const_11rsh12] "r" (const_11rsh12), + [const_178] "r" (const_178) + : "hi", "lo" + ); + inst->logLrtTimeAvgW32[i] = r2; + logLrtTimeAvgKsumFX += r2; + } + + inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, + inst->stages + 10); + // 5 = BIN_SIZE_LRT / 2 + // done with computation of LR factor + + // + // compute the indicator functions + // + + // average LRT feature + // FLOAT code + // indicator0 = 0.5 * (tanh(widthPrior * + // (logLrtTimeAvgKsum - threshPrior0)) + 1.0); + tmpIndFX = 16384; // Q14(1.0) + tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12 + nShifts = 7 - 
inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5; + //use larger width in tanh map for pause regions + if (tmp32no1 < 0) { + tmpIndFX = 0; + tmp32no1 = -tmp32no1; + //widthPrior = widthPrior * 2.0; + nShifts++; + } + tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14 + // compute indicator function: sigmoid map + tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14); + if ((tableIndex < 16) && (tableIndex >= 0)) { + tmp16no2 = kIndicatorTable[tableIndex]; + tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; + frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14 + tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); + if (tmpIndFX == 0) { + tmpIndFX = 8192 - tmp16no2; // Q14 + } else { + tmpIndFX = 8192 + tmp16no2; // Q14 + } + } + indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14 + + //spectral flatness feature + if (inst->weightSpecFlat) { + tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10 + tmpIndFX = 16384; // Q14(1.0) + //use larger width in tanh map for pause regions + tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10 + nShifts = 4; + if (inst->thresholdSpecFlat < tmpU32no1) { + tmpIndFX = 0; + tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat; + //widthPrior = widthPrior * 2.0; + nShifts++; + } + tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, + nShifts), 25); + //Q14 + tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), + 25); //Q14 + // compute indicator function: sigmoid map + // FLOAT code + // indicator1 = 0.5 * (tanh(sgnMap * widthPrior * + // (threshPrior1 - tmpFloat1)) + 1.0); + tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); + if (tableIndex < 16) { + tmp16no2 = kIndicatorTable[tableIndex]; + tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; + frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 + tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); + if (tmpIndFX) { + tmpIndFX = 8192 + tmp16no2; // Q14 + } else { + tmpIndFX = 8192 - tmp16no2; // Q14 + } + } + indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14 + } + + //for template spectral-difference + if (inst->weightSpecDiff) { + tmpU32no1 = 0; + if (inst->featureSpecDiff) { + normTmp = WEBRTC_SPL_MIN(20 - inst->stages, + WebRtcSpl_NormU32(inst->featureSpecDiff)); + tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); + // Q(normTmp-2*stages) + tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, + 20 - inst->stages - normTmp); + if (tmpU32no2 > 0) { + // Q(20 - inst->stages) + tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); + } else { + tmpU32no1 = (uint32_t)(0x7fffffff); + } + } + tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, + 17), + 25); + tmpU32no2 = tmpU32no1 - tmpU32no3; + nShifts = 1; + tmpIndFX = 16384; // Q14(1.0) + //use larger width in tanh map for pause regions + if (tmpU32no2 & 0x80000000) { + tmpIndFX = 0; + tmpU32no2 = tmpU32no3 - tmpU32no1; + //widthPrior = widthPrior * 2.0; + nShifts--; + } + tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts); + // compute indicator function: sigmoid map + /* FLOAT code + indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0); + */ + tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); + if (tableIndex < 16) { + tmp16no2 = kIndicatorTable[tableIndex]; + tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; + frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 + 
tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( + tmp16no1, frac, 14); + if (tmpIndFX) { + tmpIndFX = 8192 + tmp16no2; + } else { + tmpIndFX = 8192 - tmp16no2; + } + } + indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14 + } + + //combine the indicator function with the feature weights + // FLOAT code + // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * + // indicator1 + weightIndPrior2 * indicator2); + indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14 + // done with computing indicator function + + //compute the prior probability + // FLOAT code + // inst->priorNonSpeechProb += PRIOR_UPDATE * + // (indPriorNonSpeech - inst->priorNonSpeechProb); + tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14 + inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( + PRIOR_UPDATE_Q14, tmp16, 14); // Q14 + + //final speech probability: combine prior model with LR factor: + + memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen); + + if (inst->priorNonSpeechProb > 0) { + r0 = inst->priorNonSpeechProb; + r1 = 16384 - r0; + int32_t const_23637 = 23637; + int32_t const_44 = 44; + int32_t const_84 = 84; + int32_t const_1 = 1; + int32_t const_neg8 = -8; + for (i = 0; i < inst->magnLen; i++) { + r2 = inst->logLrtTimeAvgW32[i]; + if (r2 < 65300) { + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "mul %[r2], %[r2], %[const_23637] \n\t" + "sll %[r6], %[r1], 16 \n\t" + "clz %[r7], %[r6] \n\t" + "clo %[r8], %[r6] \n\t" + "slt %[r9], %[r6], $0 \n\t" + "movn %[r7], %[r8], %[r9] \n\t" + "sra %[r2], %[r2], 14 \n\t" + "andi %[r3], %[r2], 0xfff \n\t" + "mul %[r4], %[r3], %[r3] \n\t" + "mul %[r3], %[r3], %[const_84] \n\t" + "sra %[r2], %[r2], 12 \n\t" + "slt %[r5], %[r2], %[const_neg8] \n\t" + "movn %[r2], %[const_neg8], %[r5] \n\t" + "mul %[r4], %[r4], %[const_44] \n\t" + "sra %[r3], %[r3], 7 \n\t" + "addiu %[r7], %[r7], -1 \n\t" + "slti %[r9], %[r7], 31 \n\t" + "movz %[r7], $0, %[r9] \n\t" + "sra %[r4], %[r4], 19 \n\t" + "addu %[r4], %[r4], %[r3] \n\t" + "addiu %[r3], %[r2], 8 \n\t" + "addiu %[r2], %[r2], -4 \n\t" + "neg %[r5], %[r2] \n\t" + "sllv %[r6], %[r4], %[r2] \n\t" + "srav %[r5], %[r4], %[r5] \n\t" + "slt %[r2], %[r2], $0 \n\t" + "movn %[r6], %[r5], %[r2] \n\t" + "sllv %[r3], %[const_1], %[r3] \n\t" + "addu %[r2], %[r3], %[r6] \n\t" + "clz %[r4], %[r2] \n\t" + "clo %[r5], %[r2] \n\t" + "slt %[r8], %[r2], $0 \n\t" + "movn %[r4], %[r5], %[r8] \n\t" + "addiu %[r4], %[r4], -1 \n\t" + "slt %[r5], $0, %[r2] \n\t" + "or %[r5], %[r5], %[r7] \n\t" + "movz %[r4], $0, %[r5] \n\t" + "addiu %[r6], %[r7], -7 \n\t" + "addu %[r6], %[r6], %[r4] \n\t" + "bltz %[r6], 1f \n\t" + " nop \n\t" + "addiu %[r4], %[r6], -8 \n\t" + "neg %[r3], %[r4] \n\t" + "srav %[r5], %[r2], %[r3] \n\t" + "mul %[r5], %[r5], %[r1] \n\t" + "mul %[r2], %[r2], %[r1] \n\t" + "slt %[r4], %[r4], $0 \n\t" + "srav %[r5], %[r5], %[r6] \n\t" + "sra %[r2], %[r2], 8 \n\t" + "movn %[r2], %[r5], %[r4] \n\t" + "sll %[r3], %[r0], 8 \n\t" + "addu %[r2], %[r0], %[r2] \n\t" + "divu %[r3], %[r3], %[r2] \n\t" + "1: \n\t" + ".set pop \n\t" + : [r2] "+r" (r2), [r3] "=&r" (r3), [r4] "=&r" (r4), + [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), + [r8] "=&r" (r8), [r9] "=&r" (r9) + : [r0] "r" (r0), [r1] "r" (r1), [const_23637] "r" (const_23637), + [const_neg8] "r" (const_neg8), [const_84] "r" (const_84), + [const_1] "r" (const_1), [const_44] "r" (const_44) + : "hi", "lo" + ); + nonSpeechProbFinal[i] = r3; + } + } + } +} + +// Update analysis buffer for lower 
band, and window data before FFT. +void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst, + int16_t* out, + int16_t* new_speech) { + + int iters, after; + int anaLen = inst->anaLen; + int *window = (int*)inst->window; + int *anaBuf = (int*)inst->analysisBuffer; + int *outBuf = (int*)out; + int r0, r1, r2, r3, r4, r5, r6, r7; +#if defined(MIPS_DSP_R1_LE) + int r8; +#endif + + // For lower band update analysis buffer. + WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer, + inst->analysisBuffer + inst->blockLen10ms, + inst->anaLen - inst->blockLen10ms); + WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer + + inst->anaLen - inst->blockLen10ms, new_speech, inst->blockLen10ms); + + // Window data before FFT. +#if defined(MIPS_DSP_R1_LE) + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "sra %[iters], %[anaLen], 3 \n\t" + "1: \n\t" + "blez %[iters], 2f \n\t" + " nop \n\t" + "lw %[r0], 0(%[window]) \n\t" + "lw %[r1], 0(%[anaBuf]) \n\t" + "lw %[r2], 4(%[window]) \n\t" + "lw %[r3], 4(%[anaBuf]) \n\t" + "lw %[r4], 8(%[window]) \n\t" + "lw %[r5], 8(%[anaBuf]) \n\t" + "lw %[r6], 12(%[window]) \n\t" + "lw %[r7], 12(%[anaBuf]) \n\t" + "muleq_s.w.phl %[r8], %[r0], %[r1] \n\t" + "muleq_s.w.phr %[r0], %[r0], %[r1] \n\t" + "muleq_s.w.phl %[r1], %[r2], %[r3] \n\t" + "muleq_s.w.phr %[r2], %[r2], %[r3] \n\t" + "muleq_s.w.phl %[r3], %[r4], %[r5] \n\t" + "muleq_s.w.phr %[r4], %[r4], %[r5] \n\t" + "muleq_s.w.phl %[r5], %[r6], %[r7] \n\t" + "muleq_s.w.phr %[r6], %[r6], %[r7] \n\t" +#if defined(MIPS_DSP_R2_LE) + "precr_sra_r.ph.w %[r8], %[r0], 15 \n\t" + "precr_sra_r.ph.w %[r1], %[r2], 15 \n\t" + "precr_sra_r.ph.w %[r3], %[r4], 15 \n\t" + "precr_sra_r.ph.w %[r5], %[r6], 15 \n\t" + "sw %[r8], 0(%[outBuf]) \n\t" + "sw %[r1], 4(%[outBuf]) \n\t" + "sw %[r3], 8(%[outBuf]) \n\t" + "sw %[r5], 12(%[outBuf]) \n\t" +#else + "shra_r.w %[r8], %[r8], 15 \n\t" + "shra_r.w %[r0], %[r0], 15 \n\t" + "shra_r.w %[r1], %[r1], 15 \n\t" + "shra_r.w %[r2], %[r2], 15 \n\t" + "shra_r.w %[r3], %[r3], 15 \n\t" + "shra_r.w %[r4], %[r4], 15 \n\t" + "shra_r.w %[r5], %[r5], 15 \n\t" + "shra_r.w %[r6], %[r6], 15 \n\t" + "sll %[r0], %[r0], 16 \n\t" + "sll %[r2], %[r2], 16 \n\t" + "sll %[r4], %[r4], 16 \n\t" + "sll %[r6], %[r6], 16 \n\t" + "packrl.ph %[r0], %[r8], %[r0] \n\t" + "packrl.ph %[r2], %[r1], %[r2] \n\t" + "packrl.ph %[r4], %[r3], %[r4] \n\t" + "packrl.ph %[r6], %[r5], %[r6] \n\t" + "sw %[r0], 0(%[outBuf]) \n\t" + "sw %[r2], 4(%[outBuf]) \n\t" + "sw %[r4], 8(%[outBuf]) \n\t" + "sw %[r6], 12(%[outBuf]) \n\t" +#endif + "addiu %[window], %[window], 16 \n\t" + "addiu %[anaBuf], %[anaBuf], 16 \n\t" + "addiu %[outBuf], %[outBuf], 16 \n\t" + "b 1b \n\t" + " addiu %[iters], %[iters], -1 \n\t" + "2: \n\t" + "andi %[after], %[anaLen], 7 \n\t" + "3: \n\t" + "blez %[after], 4f \n\t" + " nop \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[anaBuf]) \n\t" + "mul %[r0], %[r0], %[r1] \n\t" + "addiu %[window], %[window], 2 \n\t" + "addiu %[anaBuf], %[anaBuf], 2 \n\t" + "addiu %[outBuf], %[outBuf], 2 \n\t" + "shra_r.w %[r0], %[r0], 14 \n\t" + "sh %[r0], -2(%[outBuf]) \n\t" + "b 3b \n\t" + " addiu %[after], %[after], -1 \n\t" + "4: \n\t" + ".set pop \n\t" + : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), + [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5), + [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8), + [iters] "=&r" (iters), [after] "=&r" (after), + [window] "+r" (window),[anaBuf] "+r" (anaBuf), + [outBuf] "+r" (outBuf) + : [anaLen] "r" (anaLen) + : "memory", "hi", "lo" + ); +#else + __asm __volatile( + ".set push \n\t" + ".set noreorder 
\n\t" + "sra %[iters], %[anaLen], 2 \n\t" + "1: \n\t" + "blez %[iters], 2f \n\t" + " nop \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[anaBuf]) \n\t" + "lh %[r2], 2(%[window]) \n\t" + "lh %[r3], 2(%[anaBuf]) \n\t" + "lh %[r4], 4(%[window]) \n\t" + "lh %[r5], 4(%[anaBuf]) \n\t" + "lh %[r6], 6(%[window]) \n\t" + "lh %[r7], 6(%[anaBuf]) \n\t" + "mul %[r0], %[r0], %[r1] \n\t" + "mul %[r2], %[r2], %[r3] \n\t" + "mul %[r4], %[r4], %[r5] \n\t" + "mul %[r6], %[r6], %[r7] \n\t" + "addiu %[window], %[window], 8 \n\t" + "addiu %[anaBuf], %[anaBuf], 8 \n\t" + "addiu %[r0], %[r0], 0x2000 \n\t" + "addiu %[r2], %[r2], 0x2000 \n\t" + "addiu %[r4], %[r4], 0x2000 \n\t" + "addiu %[r6], %[r6], 0x2000 \n\t" + "sra %[r0], %[r0], 14 \n\t" + "sra %[r2], %[r2], 14 \n\t" + "sra %[r4], %[r4], 14 \n\t" + "sra %[r6], %[r6], 14 \n\t" + "sh %[r0], 0(%[outBuf]) \n\t" + "sh %[r2], 2(%[outBuf]) \n\t" + "sh %[r4], 4(%[outBuf]) \n\t" + "sh %[r6], 6(%[outBuf]) \n\t" + "addiu %[outBuf], %[outBuf], 8 \n\t" + "b 1b \n\t" + " addiu %[iters], %[iters], -1 \n\t" + "2: \n\t" + "andi %[after], %[anaLen], 3 \n\t" + "3: \n\t" + "blez %[after], 4f \n\t" + " nop \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[anaBuf]) \n\t" + "mul %[r0], %[r0], %[r1] \n\t" + "addiu %[window], %[window], 2 \n\t" + "addiu %[anaBuf], %[anaBuf], 2 \n\t" + "addiu %[outBuf], %[outBuf], 2 \n\t" + "addiu %[r0], %[r0], 0x2000 \n\t" + "sra %[r0], %[r0], 14 \n\t" + "sh %[r0], -2(%[outBuf]) \n\t" + "b 3b \n\t" + " addiu %[after], %[after], -1 \n\t" + "4: \n\t" + ".set pop \n\t" + : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), + [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5), + [r6] "=&r" (r6), [r7] "=&r" (r7), [iters] "=&r" (iters), + [after] "=&r" (after), [window] "+r" (window), + [anaBuf] "+r" (anaBuf), [outBuf] "+r" (outBuf) + : [anaLen] "r" (anaLen) + : "memory", "hi", "lo" + ); +#endif +} + +// For the noise supression process, synthesis, read out fully processed +// segment, and update synthesis buffer. 
+void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst, + int16_t* out_frame, + int16_t gain_factor) { + + int iters = inst->blockLen10ms >> 2; + int after = inst->blockLen10ms & 3; + int r0, r1, r2, r3, r4, r5, r6, r7; + int16_t *window = (int16_t*)inst->window; + int16_t *real = inst->real; + int16_t *synthBuf = inst->synthesisBuffer; + int16_t *out = out_frame; + int sat_pos = 0x7fff; + int sat_neg = 0xffff8000; + int block10 = (int)inst->blockLen10ms; + int anaLen = (int)inst->anaLen; + + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "1: \n\t" + "blez %[iters], 2f \n\t" + " nop \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[real]) \n\t" + "lh %[r2], 2(%[window]) \n\t" + "lh %[r3], 2(%[real]) \n\t" + "lh %[r4], 4(%[window]) \n\t" + "lh %[r5], 4(%[real]) \n\t" + "lh %[r6], 6(%[window]) \n\t" + "lh %[r7], 6(%[real]) \n\t" + "mul %[r0], %[r0], %[r1] \n\t" + "mul %[r2], %[r2], %[r3] \n\t" + "mul %[r4], %[r4], %[r5] \n\t" + "mul %[r6], %[r6], %[r7] \n\t" + "addiu %[r0], %[r0], 0x2000 \n\t" + "addiu %[r2], %[r2], 0x2000 \n\t" + "addiu %[r4], %[r4], 0x2000 \n\t" + "addiu %[r6], %[r6], 0x2000 \n\t" + "sra %[r0], %[r0], 14 \n\t" + "sra %[r2], %[r2], 14 \n\t" + "sra %[r4], %[r4], 14 \n\t" + "sra %[r6], %[r6], 14 \n\t" + "mul %[r0], %[r0], %[gain_factor] \n\t" + "mul %[r2], %[r2], %[gain_factor] \n\t" + "mul %[r4], %[r4], %[gain_factor] \n\t" + "mul %[r6], %[r6], %[gain_factor] \n\t" + "addiu %[r0], %[r0], 0x1000 \n\t" + "addiu %[r2], %[r2], 0x1000 \n\t" + "addiu %[r4], %[r4], 0x1000 \n\t" + "addiu %[r6], %[r6], 0x1000 \n\t" + "sra %[r0], %[r0], 13 \n\t" + "sra %[r2], %[r2], 13 \n\t" + "sra %[r4], %[r4], 13 \n\t" + "sra %[r6], %[r6], 13 \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "slt %[r3], %[r2], %[sat_pos] \n\t" + "slt %[r5], %[r4], %[sat_pos] \n\t" + "slt %[r7], %[r6], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "movz %[r2], %[sat_pos], %[r3] \n\t" + "movz %[r4], %[sat_pos], %[r5] \n\t" + "movz %[r6], %[sat_pos], %[r7] \n\t" + "lh %[r1], 0(%[synthBuf]) \n\t" + "lh %[r3], 2(%[synthBuf]) \n\t" + "lh %[r5], 4(%[synthBuf]) \n\t" + "lh %[r7], 6(%[synthBuf]) \n\t" + "addu %[r0], %[r0], %[r1] \n\t" + "addu %[r2], %[r2], %[r3] \n\t" + "addu %[r4], %[r4], %[r5] \n\t" + "addu %[r6], %[r6], %[r7] \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "slt %[r3], %[r2], %[sat_pos] \n\t" + "slt %[r5], %[r4], %[sat_pos] \n\t" + "slt %[r7], %[r6], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "movz %[r2], %[sat_pos], %[r3] \n\t" + "movz %[r4], %[sat_pos], %[r5] \n\t" + "movz %[r6], %[sat_pos], %[r7] \n\t" + "slt %[r1], %[r0], %[sat_neg] \n\t" + "slt %[r3], %[r2], %[sat_neg] \n\t" + "slt %[r5], %[r4], %[sat_neg] \n\t" + "slt %[r7], %[r6], %[sat_neg] \n\t" + "movn %[r0], %[sat_neg], %[r1] \n\t" + "movn %[r2], %[sat_neg], %[r3] \n\t" + "movn %[r4], %[sat_neg], %[r5] \n\t" + "movn %[r6], %[sat_neg], %[r7] \n\t" + "sh %[r0], 0(%[synthBuf]) \n\t" + "sh %[r2], 2(%[synthBuf]) \n\t" + "sh %[r4], 4(%[synthBuf]) \n\t" + "sh %[r6], 6(%[synthBuf]) \n\t" + "sh %[r0], 0(%[out]) \n\t" + "sh %[r2], 2(%[out]) \n\t" + "sh %[r4], 4(%[out]) \n\t" + "sh %[r6], 6(%[out]) \n\t" + "addiu %[window], %[window], 8 \n\t" + "addiu %[real], %[real], 8 \n\t" + "addiu %[synthBuf],%[synthBuf], 8 \n\t" + "addiu %[out], %[out], 8 \n\t" + "b 1b \n\t" + " addiu %[iters], %[iters], -1 \n\t" + "2: \n\t" + "blez %[after], 3f \n\t" + " subu %[block10], %[anaLen], %[block10] \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[real]) \n\t" + "mul %[r0], %[r0], %[r1] \n\t" + "addiu %[window], %[window], 2 
\n\t" + "addiu %[real], %[real], 2 \n\t" + "addiu %[r0], %[r0], 0x2000 \n\t" + "sra %[r0], %[r0], 14 \n\t" + "mul %[r0], %[r0], %[gain_factor] \n\t" + "addiu %[r0], %[r0], 0x1000 \n\t" + "sra %[r0], %[r0], 13 \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "lh %[r1], 0(%[synthBuf]) \n\t" + "addu %[r0], %[r0], %[r1] \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "slt %[r1], %[r0], %[sat_neg] \n\t" + "movn %[r0], %[sat_neg], %[r1] \n\t" + "sh %[r0], 0(%[synthBuf]) \n\t" + "sh %[r0], 0(%[out]) \n\t" + "addiu %[synthBuf],%[synthBuf], 2 \n\t" + "addiu %[out], %[out], 2 \n\t" + "b 2b \n\t" + " addiu %[after], %[after], -1 \n\t" + "3: \n\t" + "sra %[iters], %[block10], 2 \n\t" + "4: \n\t" + "blez %[iters], 5f \n\t" + " andi %[after], %[block10], 3 \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[real]) \n\t" + "lh %[r2], 2(%[window]) \n\t" + "lh %[r3], 2(%[real]) \n\t" + "lh %[r4], 4(%[window]) \n\t" + "lh %[r5], 4(%[real]) \n\t" + "lh %[r6], 6(%[window]) \n\t" + "lh %[r7], 6(%[real]) \n\t" + "mul %[r0], %[r0], %[r1] \n\t" + "mul %[r2], %[r2], %[r3] \n\t" + "mul %[r4], %[r4], %[r5] \n\t" + "mul %[r6], %[r6], %[r7] \n\t" + "addiu %[r0], %[r0], 0x2000 \n\t" + "addiu %[r2], %[r2], 0x2000 \n\t" + "addiu %[r4], %[r4], 0x2000 \n\t" + "addiu %[r6], %[r6], 0x2000 \n\t" + "sra %[r0], %[r0], 14 \n\t" + "sra %[r2], %[r2], 14 \n\t" + "sra %[r4], %[r4], 14 \n\t" + "sra %[r6], %[r6], 14 \n\t" + "mul %[r0], %[r0], %[gain_factor] \n\t" + "mul %[r2], %[r2], %[gain_factor] \n\t" + "mul %[r4], %[r4], %[gain_factor] \n\t" + "mul %[r6], %[r6], %[gain_factor] \n\t" + "addiu %[r0], %[r0], 0x1000 \n\t" + "addiu %[r2], %[r2], 0x1000 \n\t" + "addiu %[r4], %[r4], 0x1000 \n\t" + "addiu %[r6], %[r6], 0x1000 \n\t" + "sra %[r0], %[r0], 13 \n\t" + "sra %[r2], %[r2], 13 \n\t" + "sra %[r4], %[r4], 13 \n\t" + "sra %[r6], %[r6], 13 \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "slt %[r3], %[r2], %[sat_pos] \n\t" + "slt %[r5], %[r4], %[sat_pos] \n\t" + "slt %[r7], %[r6], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "movz %[r2], %[sat_pos], %[r3] \n\t" + "movz %[r4], %[sat_pos], %[r5] \n\t" + "movz %[r6], %[sat_pos], %[r7] \n\t" + "lh %[r1], 0(%[synthBuf]) \n\t" + "lh %[r3], 2(%[synthBuf]) \n\t" + "lh %[r5], 4(%[synthBuf]) \n\t" + "lh %[r7], 6(%[synthBuf]) \n\t" + "addu %[r0], %[r0], %[r1] \n\t" + "addu %[r2], %[r2], %[r3] \n\t" + "addu %[r4], %[r4], %[r5] \n\t" + "addu %[r6], %[r6], %[r7] \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "slt %[r3], %[r2], %[sat_pos] \n\t" + "slt %[r5], %[r4], %[sat_pos] \n\t" + "slt %[r7], %[r6], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "movz %[r2], %[sat_pos], %[r3] \n\t" + "movz %[r4], %[sat_pos], %[r5] \n\t" + "movz %[r6], %[sat_pos], %[r7] \n\t" + "slt %[r1], %[r0], %[sat_neg] \n\t" + "slt %[r3], %[r2], %[sat_neg] \n\t" + "slt %[r5], %[r4], %[sat_neg] \n\t" + "slt %[r7], %[r6], %[sat_neg] \n\t" + "movn %[r0], %[sat_neg], %[r1] \n\t" + "movn %[r2], %[sat_neg], %[r3] \n\t" + "movn %[r4], %[sat_neg], %[r5] \n\t" + "movn %[r6], %[sat_neg], %[r7] \n\t" + "sh %[r0], 0(%[synthBuf]) \n\t" + "sh %[r2], 2(%[synthBuf]) \n\t" + "sh %[r4], 4(%[synthBuf]) \n\t" + "sh %[r6], 6(%[synthBuf]) \n\t" + "addiu %[window], %[window], 8 \n\t" + "addiu %[real], %[real], 8 \n\t" + "addiu %[synthBuf],%[synthBuf], 8 \n\t" + "b 4b \n\t" + " addiu %[iters], %[iters], -1 \n\t" + "5: \n\t" + "blez %[after], 6f \n\t" + " nop \n\t" + "lh %[r0], 0(%[window]) \n\t" + "lh %[r1], 0(%[real]) \n\t" + "mul %[r0], %[r0], %[r1] 
\n\t" + "addiu %[window], %[window], 2 \n\t" + "addiu %[real], %[real], 2 \n\t" + "addiu %[r0], %[r0], 0x2000 \n\t" + "sra %[r0], %[r0], 14 \n\t" + "mul %[r0], %[r0], %[gain_factor] \n\t" + "addiu %[r0], %[r0], 0x1000 \n\t" + "sra %[r0], %[r0], 13 \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "lh %[r1], 0(%[synthBuf]) \n\t" + "addu %[r0], %[r0], %[r1] \n\t" + "slt %[r1], %[r0], %[sat_pos] \n\t" + "movz %[r0], %[sat_pos], %[r1] \n\t" + "slt %[r1], %[r0], %[sat_neg] \n\t" + "movn %[r0], %[sat_neg], %[r1] \n\t" + "sh %[r0], 0(%[synthBuf]) \n\t" + "addiu %[synthBuf],%[synthBuf], 2 \n\t" + "b 2b \n\t" + " addiu %[after], %[after], -1 \n\t" + "6: \n\t" + ".set pop \n\t" + : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), + [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5), + [r6] "=&r" (r6), [r7] "=&r" (r7), [iters] "+r" (iters), + [after] "+r" (after), [block10] "+r" (block10), + [window] "+r" (window), [real] "+r" (real), + [synthBuf] "+r" (synthBuf), [out] "+r" (out) + : [gain_factor] "r" (gain_factor), [sat_pos] "r" (sat_pos), + [sat_neg] "r" (sat_neg), [anaLen] "r" (anaLen) + : "memory", "hi", "lo" + ); + + // update synthesis buffer + WEBRTC_SPL_MEMCPY_W16(inst->synthesisBuffer, + inst->synthesisBuffer + inst->blockLen10ms, + inst->anaLen - inst->blockLen10ms); + WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer + + inst->anaLen - inst->blockLen10ms, inst->blockLen10ms); +} + +// Filter the data in the frequency domain, and create spectrum. +void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buf) { + + uint16_t *noiseSupFilter = inst->noiseSupFilter; + int16_t *real = inst->real; + int16_t *imag = inst->imag; + int32_t loop_count = 2; + int16_t tmp_1, tmp_2, tmp_3, tmp_4, tmp_5, tmp_6; + int16_t tmp16 = (inst->anaLen << 1) - 4; + int16_t* freq_buf_f = freq_buf; + int16_t* freq_buf_s = &freq_buf[tmp16]; + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + //first sample + "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t" + "lh %[tmp_2], 0(%[real]) \n\t" + "lh %[tmp_3], 0(%[imag]) \n\t" + "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t" + "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t" + "sra %[tmp_2], %[tmp_2], 14 \n\t" + "sra %[tmp_3], %[tmp_3], 14 \n\t" + "sh %[tmp_2], 0(%[real]) \n\t" + "sh %[tmp_3], 0(%[imag]) \n\t" + "negu %[tmp_3], %[tmp_3] \n\t" + "sh %[tmp_2], 0(%[freq_buf_f]) \n\t" + "sh %[tmp_3], 2(%[freq_buf_f]) \n\t" + "addiu %[real], %[real], 2 \n\t" + "addiu %[imag], %[imag], 2 \n\t" + "addiu %[noiseSupFilter], %[noiseSupFilter], 2 \n\t" + "addiu %[freq_buf_f], %[freq_buf_f], 4 \n\t" + "1: \n\t" + "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t" + "lh %[tmp_2], 0(%[real]) \n\t" + "lh %[tmp_3], 0(%[imag]) \n\t" + "lh %[tmp_4], 2(%[noiseSupFilter]) \n\t" + "lh %[tmp_5], 2(%[real]) \n\t" + "lh %[tmp_6], 2(%[imag]) \n\t" + "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t" + "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t" + "mul %[tmp_5], %[tmp_5], %[tmp_4] \n\t" + "mul %[tmp_6], %[tmp_6], %[tmp_4] \n\t" + "addiu %[loop_count], %[loop_count], 2 \n\t" + "sra %[tmp_2], %[tmp_2], 14 \n\t" + "sra %[tmp_3], %[tmp_3], 14 \n\t" + "sra %[tmp_5], %[tmp_5], 14 \n\t" + "sra %[tmp_6], %[tmp_6], 14 \n\t" + "addiu %[noiseSupFilter], %[noiseSupFilter], 4 \n\t" + "sh %[tmp_2], 0(%[real]) \n\t" + "sh %[tmp_2], 4(%[freq_buf_s]) \n\t" + "sh %[tmp_3], 0(%[imag]) \n\t" + "sh %[tmp_3], 6(%[freq_buf_s]) \n\t" + "negu %[tmp_3], %[tmp_3] \n\t" + "sh %[tmp_5], 2(%[real]) \n\t" + "sh %[tmp_5], 0(%[freq_buf_s]) \n\t" + "sh %[tmp_6], 2(%[imag]) \n\t" + "sh %[tmp_6], 2(%[freq_buf_s]) \n\t" + 
"negu %[tmp_6], %[tmp_6] \n\t" + "addiu %[freq_buf_s], %[freq_buf_s], -8 \n\t" + "addiu %[real], %[real], 4 \n\t" + "addiu %[imag], %[imag], 4 \n\t" + "sh %[tmp_2], 0(%[freq_buf_f]) \n\t" + "sh %[tmp_3], 2(%[freq_buf_f]) \n\t" + "sh %[tmp_5], 4(%[freq_buf_f]) \n\t" + "sh %[tmp_6], 6(%[freq_buf_f]) \n\t" + "blt %[loop_count], %[loop_size], 1b \n\t" + " addiu %[freq_buf_f], %[freq_buf_f], 8 \n\t" + //last two samples: + "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t" + "lh %[tmp_2], 0(%[real]) \n\t" + "lh %[tmp_3], 0(%[imag]) \n\t" + "lh %[tmp_4], 2(%[noiseSupFilter]) \n\t" + "lh %[tmp_5], 2(%[real]) \n\t" + "lh %[tmp_6], 2(%[imag]) \n\t" + "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t" + "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t" + "mul %[tmp_5], %[tmp_5], %[tmp_4] \n\t" + "mul %[tmp_6], %[tmp_6], %[tmp_4] \n\t" + "sra %[tmp_2], %[tmp_2], 14 \n\t" + "sra %[tmp_3], %[tmp_3], 14 \n\t" + "sra %[tmp_5], %[tmp_5], 14 \n\t" + "sra %[tmp_6], %[tmp_6], 14 \n\t" + "sh %[tmp_2], 0(%[real]) \n\t" + "sh %[tmp_2], 4(%[freq_buf_s]) \n\t" + "sh %[tmp_3], 0(%[imag]) \n\t" + "sh %[tmp_3], 6(%[freq_buf_s]) \n\t" + "negu %[tmp_3], %[tmp_3] \n\t" + "sh %[tmp_2], 0(%[freq_buf_f]) \n\t" + "sh %[tmp_3], 2(%[freq_buf_f]) \n\t" + "sh %[tmp_5], 4(%[freq_buf_f]) \n\t" + "sh %[tmp_6], 6(%[freq_buf_f]) \n\t" + "sh %[tmp_5], 2(%[real]) \n\t" + "sh %[tmp_6], 2(%[imag]) \n\t" + ".set pop \n\t" + : [real] "+r" (real), [imag] "+r" (imag), + [freq_buf_f] "+r" (freq_buf_f), [freq_buf_s] "+r" (freq_buf_s), + [loop_count] "+r" (loop_count), [noiseSupFilter] "+r" (noiseSupFilter), + [tmp_1] "=&r" (tmp_1), [tmp_2] "=&r" (tmp_2), [tmp_3] "=&r" (tmp_3), + [tmp_4] "=&r" (tmp_4), [tmp_5] "=&r" (tmp_5), [tmp_6] "=&r" (tmp_6) + : [loop_size] "r" (inst->anaLen2) + : "memory", "hi", "lo" + ); +} + +#if defined(MIPS_DSP_R1_LE) +// Denormalize the real-valued signal |in|, the output from inverse FFT. 
+void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor) { + int32_t r0, r1, r2, r3, t0; + int len = inst->anaLen; + int16_t *out = &inst->real[0]; + int shift = factor - inst->normData; + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "beqz %[len], 8f \n\t" + " nop \n\t" + "bltz %[shift], 4f \n\t" + " sra %[t0], %[len], 2 \n\t" + "beqz %[t0], 2f \n\t" + " andi %[len], %[len], 3 \n\t" + "1: \n\t" + "lh %[r0], 0(%[in]) \n\t" + "lh %[r1], 2(%[in]) \n\t" + "lh %[r2], 4(%[in]) \n\t" + "lh %[r3], 6(%[in]) \n\t" + "shllv_s.ph %[r0], %[r0], %[shift] \n\t" + "shllv_s.ph %[r1], %[r1], %[shift] \n\t" + "shllv_s.ph %[r2], %[r2], %[shift] \n\t" + "shllv_s.ph %[r3], %[r3], %[shift] \n\t" + "addiu %[in], %[in], 8 \n\t" + "addiu %[t0], %[t0], -1 \n\t" + "sh %[r0], 0(%[out]) \n\t" + "sh %[r1], 2(%[out]) \n\t" + "sh %[r2], 4(%[out]) \n\t" + "sh %[r3], 6(%[out]) \n\t" + "bgtz %[t0], 1b \n\t" + " addiu %[out], %[out], 8 \n\t" + "2: \n\t" + "beqz %[len], 8f \n\t" + " nop \n\t" + "3: \n\t" + "lh %[r0], 0(%[in]) \n\t" + "addiu %[in], %[in], 2 \n\t" + "addiu %[len], %[len], -1 \n\t" + "shllv_s.ph %[r0], %[r0], %[shift] \n\t" + "addiu %[out], %[out], 2 \n\t" + "bgtz %[len], 3b \n\t" + " sh %[r0], -2(%[out]) \n\t" + "b 8f \n\t" + "4: \n\t" + "negu %[shift], %[shift] \n\t" + "beqz %[t0], 6f \n\t" + " andi %[len], %[len], 3 \n\t" + "5: \n\t" + "lh %[r0], 0(%[in]) \n\t" + "lh %[r1], 2(%[in]) \n\t" + "lh %[r2], 4(%[in]) \n\t" + "lh %[r3], 6(%[in]) \n\t" + "srav %[r0], %[r0], %[shift] \n\t" + "srav %[r1], %[r1], %[shift] \n\t" + "srav %[r2], %[r2], %[shift] \n\t" + "srav %[r3], %[r3], %[shift] \n\t" + "addiu %[in], %[in], 8 \n\t" + "addiu %[t0], %[t0], -1 \n\t" + "sh %[r0], 0(%[out]) \n\t" + "sh %[r1], 2(%[out]) \n\t" + "sh %[r2], 4(%[out]) \n\t" + "sh %[r3], 6(%[out]) \n\t" + "bgtz %[t0], 5b \n\t" + " addiu %[out], %[out], 8 \n\t" + "6: \n\t" + "beqz %[len], 8f \n\t" + " nop \n\t" + "7: \n\t" + "lh %[r0], 0(%[in]) \n\t" + "addiu %[in], %[in], 2 \n\t" + "addiu %[len], %[len], -1 \n\t" + "srav %[r0], %[r0], %[shift] \n\t" + "addiu %[out], %[out], 2 \n\t" + "bgtz %[len], 7b \n\t" + " sh %[r0], -2(%[out]) \n\t" + "8: \n\t" + ".set pop \n\t" + : [t0] "=&r" (t0), [r0] "=&r" (r0), [r1] "=&r" (r1), + [r2] "=&r" (r2), [r3] "=&r" (r3) + : [len] "r" (len), [shift] "r" (shift), [in] "r" (in), + [out] "r" (out) + : "memory" + ); +} +#endif + +// Normalize the real-valued signal |in|, the input to forward FFT. 
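
The denormalize/normalize pair reduces to a pair of shifts; a scalar sketch, under the assumption that saturation is only needed on the left shift (which is what the shllv_s.ph instruction above provides). Sat16 and the function names exist only in this sketch.

#include <stdint.h>

static int16_t Sat16(int32_t v) {
  return (int16_t)(v > 32767 ? 32767 : (v < -32768 ? -32768 : v));
}

// Input to the forward FFT is left-shifted by inst->normData to use the
// full 16-bit range...
static void NormalizeScalar(const int16_t* in, int16_t* out,
                            int len, int norm_data) {
  for (int i = 0; i < len; i++)
    out[i] = (int16_t)(in[i] << norm_data);
}

// ...and the inverse-FFT output is shifted back by (factor - normData),
// saturating when the net shift is to the left.
static void DenormalizeScalar(const int16_t* in, int16_t* out,
                              int len, int factor, int norm_data) {
  int shift = factor - norm_data;
  for (int i = 0; i < len; i++) {
    out[i] = (shift >= 0) ? Sat16((int32_t)in[i] << shift)
                          : (int16_t)(in[i] >> -shift);
  }
}
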
+void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst, + const int16_t* in, + int16_t* out) { + int32_t r0, r1, r2, r3, t0; + int len = inst->anaLen; + int shift = inst->normData; + + __asm __volatile ( + ".set push \n\t" + ".set noreorder \n\t" + "beqz %[len], 4f \n\t" + " sra %[t0], %[len], 2 \n\t" + "beqz %[t0], 2f \n\t" + " andi %[len], %[len], 3 \n\t" + "1: \n\t" + "lh %[r0], 0(%[in]) \n\t" + "lh %[r1], 2(%[in]) \n\t" + "lh %[r2], 4(%[in]) \n\t" + "lh %[r3], 6(%[in]) \n\t" + "sllv %[r0], %[r0], %[shift] \n\t" + "sllv %[r1], %[r1], %[shift] \n\t" + "sllv %[r2], %[r2], %[shift] \n\t" + "sllv %[r3], %[r3], %[shift] \n\t" + "addiu %[in], %[in], 8 \n\t" + "addiu %[t0], %[t0], -1 \n\t" + "sh %[r0], 0(%[out]) \n\t" + "sh %[r1], 2(%[out]) \n\t" + "sh %[r2], 4(%[out]) \n\t" + "sh %[r3], 6(%[out]) \n\t" + "bgtz %[t0], 1b \n\t" + " addiu %[out], %[out], 8 \n\t" + "2: \n\t" + "beqz %[len], 4f \n\t" + " nop \n\t" + "3: \n\t" + "lh %[r0], 0(%[in]) \n\t" + "addiu %[in], %[in], 2 \n\t" + "addiu %[len], %[len], -1 \n\t" + "sllv %[r0], %[r0], %[shift] \n\t" + "addiu %[out], %[out], 2 \n\t" + "bgtz %[len], 3b \n\t" + " sh %[r0], -2(%[out]) \n\t" + "4: \n\t" + ".set pop \n\t" + : [t0] "=&r" (t0), [r0] "=&r" (r0), [r1] "=&r" (r1), + [r2] "=&r" (r2), [r3] "=&r" (r3) + : [len] "r" (len), [shift] "r" (shift), [in] "r" (in), + [out] "r" (out) + : "memory" + ); +} + diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/splitting_filter.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/splitting_filter.cc deleted file mode 100644 index 372c8dc426ec..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/splitting_filter.cc +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" -#include "webrtc/modules/audio_processing/splitting_filter.h" - -namespace webrtc { - -void SplittingFilterAnalysis(const int16_t* in_data, - int16_t* low_band, - int16_t* high_band, - int32_t* filter_state1, - int32_t* filter_state2) -{ - WebRtcSpl_AnalysisQMF(in_data, low_band, high_band, filter_state1, filter_state2); -} - -void SplittingFilterSynthesis(const int16_t* low_band, - const int16_t* high_band, - int16_t* out_data, - int32_t* filt_state1, - int32_t* filt_state2) -{ - WebRtcSpl_SynthesisQMF(low_band, high_band, out_data, filt_state1, filt_state2); -} -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/splitting_filter.h b/media/webrtc/trunk/webrtc/modules/audio_processing/splitting_filter.h deleted file mode 100644 index b6c851273a66..000000000000 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/splitting_filter.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_ -#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_ - -#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" -#include "webrtc/typedefs.h" - -namespace webrtc { -/* - * SplittingFilterbank_analysisQMF(...) - * - * Splits a super-wb signal into two subbands: 0-8 kHz and 8-16 kHz. - * - * Input: - * - in_data : super-wb audio signal - * - * Input & Output: - * - filt_state1: Filter state for first all-pass filter - * - filt_state2: Filter state for second all-pass filter - * - * Output: - * - low_band : The signal from the 0-4 kHz band - * - high_band : The signal from the 4-8 kHz band - */ -void SplittingFilterAnalysis(const int16_t* in_data, - int16_t* low_band, - int16_t* high_band, - int32_t* filt_state1, - int32_t* filt_state2); - -/* - * SplittingFilterbank_synthesisQMF(...) - * - * Combines the two subbands (0-8 and 8-16 kHz) into a super-wb signal. - * - * Input: - * - low_band : The signal with the 0-8 kHz band - * - high_band : The signal with the 8-16 kHz band - * - * Input & Output: - * - filt_state1: Filter state for first all-pass filter - * - filt_state2: Filter state for second all-pass filter - * - * Output: - * - out_data : super-wb speech signal - */ -void SplittingFilterSynthesis(const int16_t* low_band, - const int16_t* high_band, - int16_t* out_data, - int32_t* filt_state1, - int32_t* filt_state2); -} // namespace webrtc - -#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_ diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/test/audio_processing_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/test/audio_processing_unittest.cc index d8870a527520..5e66c60a03ba 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/test/audio_processing_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/test/audio_processing_unittest.cc @@ -15,10 +15,10 @@ #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" #include "webrtc/modules/audio_processing/include/audio_processing.h" +#include "webrtc/modules/audio_processing/test/test_utils.h" #include "webrtc/modules/interface/module_common_types.h" #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "webrtc/system_wrappers/interface/thread_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/test/testsupport/fileutils.h" #include "webrtc/test/testsupport/gtest_disable.h" @@ -37,6 +37,8 @@ using webrtc::AudioProcessing; using webrtc::AudioFrame; +using webrtc::Config; +using webrtc::ExperimentalAgc; using webrtc::GainControl; using webrtc::NoiseSuppression; using webrtc::EchoCancellation; @@ -50,17 +52,19 @@ using webrtc::EchoControlMobile; using webrtc::VoiceDetection; namespace { +// TODO(bjornv): This is not feasible until the functionality has been +// re-implemented; see comment at the bottom of this file. // When false, this will compare the output data with the results stored to // file. This is the typical case. When the file should be updated, it can // be set to true with the command-line switch --write_ref_data. 
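
The splitting-filter wrappers deleted above were one-line pass-throughs, so callers now invoke the SPL QMF pair directly. A sketch for one 10 ms super-wideband frame (320 samples at 32 kHz); the 6-word filter-state arrays and the SplitAndRejoin name are assumptions made for illustration.

#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"

static void SplitAndRejoin(const int16_t in_data[320],
                           int16_t out_data[320]) {
  int16_t low_band[160];                    // lower subband
  int16_t high_band[160];                   // upper subband
  static int32_t a_state1[6], a_state2[6];  // analysis all-pass states
  static int32_t s_state1[6], s_state2[6];  // synthesis all-pass states

  WebRtcSpl_AnalysisQMF(in_data, low_band, high_band, a_state1, a_state2);
  // ...per-band processing (e.g. noise suppression) goes here...
  WebRtcSpl_SynthesisQMF(low_band, high_band, out_data, s_state1, s_state2);
}
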
 #ifdef WEBRTC_AUDIOPROC_BIT_EXACT
 bool write_ref_data = false;
+const int kChannels[] = {1, 2};
+const size_t kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
 #endif
 
 const int kSampleRates[] = {8000, 16000, 32000};
 const size_t kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
-const int kChannels[] = {1, 2};
-const size_t kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
 
 #if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
 // AECM doesn't support super-wb.
@@ -113,21 +117,6 @@ void SetFrameTo(AudioFrame* frame, int16_t left, int16_t right) {
   }
 }
 
-template <typename T>
-T AbsValue(T a) {
-  return a > 0 ? a: -a;
-}
-
-int16_t MaxAudioFrame(const AudioFrame& frame) {
-  const int length = frame.samples_per_channel_ * frame.num_channels_;
-  int16_t max_data = AbsValue(frame.data_[0]);
-  for (int i = 1; i < length; i++) {
-    max_data = std::max(max_data, AbsValue(frame.data_[i]));
-  }
-
-  return max_data;
-}
-
 bool FrameDataAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
   if (frame1.samples_per_channel_ !=
       frame2.samples_per_channel_) {
@@ -145,6 +134,24 @@ bool FrameDataAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
   return true;
 }
 
+#ifdef WEBRTC_AUDIOPROC_BIT_EXACT
+// These functions are only used by the bit-exact test.
+template <typename T>
+T AbsValue(T a) {
+  return a > 0 ? a: -a;
+}
+
+int16_t MaxAudioFrame(const AudioFrame& frame) {
+  const int length = frame.samples_per_channel_ * frame.num_channels_;
+  int16_t max_data = AbsValue(frame.data_[0]);
+  for (int i = 1; i < length; i++) {
+    max_data = std::max(max_data, AbsValue(frame.data_[i]));
+  }
+
+  return max_data;
+}
+
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
 void TestStats(const AudioProcessing::Statistic& test,
                const webrtc::audioproc::Test::Statistic& reference) {
   EXPECT_EQ(reference.instant(), test.instant);
@@ -160,6 +167,7 @@ void WriteStatsMessage(const AudioProcessing::Statistic& output,
   message->set_maximum(output.maximum);
   message->set_minimum(output.minimum);
 }
+#endif
 
 void WriteMessageLiteToFile(const std::string filename,
                             const ::google::protobuf::MessageLite& message) {
@@ -196,16 +204,7 @@ void ReadMessageLiteFromFile(const std::string filename,
   delete [] array;
   fclose(file);
 }
-
-struct ThreadData {
-  ThreadData(int thread_num_, AudioProcessing* ap_)
-      : thread_num(thread_num_),
-        error(false),
-        ap(ap_) {}
-  int thread_num;
-  bool error;
-  AudioProcessing* ap;
-};
+#endif  // WEBRTC_AUDIOPROC_BIT_EXACT
 
 class ApmTest : public ::testing::Test {
  protected:
@@ -236,11 +235,10 @@ class ApmTest : public ::testing::Test {
   void EnableAllComponents();
   bool ReadFrame(FILE* file, AudioFrame* frame);
   void ProcessWithDefaultStreamParameters(AudioFrame* frame);
-  template <typename F>
-  void ChangeTriggersInit(F f, AudioProcessing* ap, int initial_value,
-                          int changed_value);
   void ProcessDelayVerificationTest(int delay_ms, int system_delay_ms,
                                     int delay_min, int delay_max);
+  void TestChangingChannels(int num_channels,
+                            AudioProcessing::Error expected_return);
 
   const std::string output_path_;
   const std::string ref_path_;
@@ -262,12 +260,15 @@ ApmTest::ApmTest()
 #elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
       ref_filename_(ref_path_ + "output_data_float.pb"),
 #endif
-      apm_(AudioProcessing::Create(0)),
       frame_(NULL),
       revframe_(NULL),
      far_file_(NULL),
       near_file_(NULL),
-      out_file_(NULL) {}
+      out_file_(NULL) {
+  Config config;
+  config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
+  apm_.reset(AudioProcessing::Create(config));
+}
 
 void ApmTest::SetUp() {
   ASSERT_TRUE(apm_.get() != NULL);
@@ -275,7 +276,11 @@ void ApmTest::SetUp() {
   frame_ = new AudioFrame();
  revframe_ = new AudioFrame();
 
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+  Init(16000, 2, 2, 2, false);
+#else
   Init(32000, 2, 2, 2, false);
+#endif
 }
 
 void ApmTest::TearDown() {
@@ -336,17 +341,8 @@ std::string ApmTest::OutputFilePath(std::string name,
 
 void ApmTest::Init(int sample_rate_hz, int num_reverse_channels,
                    int num_input_channels, int num_output_channels,
                    bool open_output_file) {
-  ASSERT_EQ(apm_->kNoError, apm_->Initialize());
-
-  // Handles error checking of the parameters as well. No need to repeat it.
-  ASSERT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(sample_rate_hz));
-  ASSERT_EQ(apm_->kNoError, apm_->set_num_channels(num_input_channels,
-                                                   num_output_channels));
-  ASSERT_EQ(apm_->kNoError,
-            apm_->set_num_reverse_channels(num_reverse_channels));
-
   // We always use 10 ms frames.
-  const int samples_per_channel = sample_rate_hz / 100;
+  const int samples_per_channel = kChunkSizeMs * sample_rate_hz / 1000;
   frame_->samples_per_channel_ = samples_per_channel;
   frame_->num_channels_ = num_input_channels;
   frame_->sample_rate_hz_ = sample_rate_hz;
@@ -354,6 +350,12 @@ void ApmTest::Init(int sample_rate_hz, int num_reverse_channels,
   revframe_->num_channels_ = num_reverse_channels;
   revframe_->sample_rate_hz_ = sample_rate_hz;
 
+  // Make one process call to ensure the audio parameters are set. It might
+  // result in a stream error which we can safely ignore.
+  int err = apm_->ProcessStream(frame_);
+  ASSERT_TRUE(err == kNoErr || err == apm_->kStreamParameterNotSetError);
+  ASSERT_EQ(apm_->kNoError, apm_->Initialize());
+
   if (far_file_) {
     ASSERT_EQ(0, fclose(far_file_));
   }
@@ -384,7 +386,6 @@ void ApmTest::Init(int sample_rate_hz, int num_reverse_channels,
 
 void ApmTest::EnableAllComponents() {
 #if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
   EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
 
   EXPECT_EQ(apm_->kNoError,
@@ -448,50 +449,6 @@ void ApmTest::ProcessWithDefaultStreamParameters(AudioFrame* frame) {
   EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame));
 }
 
-template <typename F>
-void ApmTest::ChangeTriggersInit(F f, AudioProcessing* ap, int initial_value,
-                                 int changed_value) {
-  EnableAllComponents();
-  Init(16000, 2, 2, 2, false);
-  SetFrameTo(frame_, 1000);
-  AudioFrame frame_copy;
-  frame_copy.CopyFrom(*frame_);
-  ProcessWithDefaultStreamParameters(frame_);
-  // Verify the processing has actually changed the frame.
-  EXPECT_FALSE(FrameDataAreEqual(*frame_, frame_copy));
-
-  // Test that a change in value triggers an init.
-  f(apm_.get(), changed_value);
-  f(apm_.get(), initial_value);
-  ProcessWithDefaultStreamParameters(&frame_copy);
-  EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));
-
-  apm_->Initialize();
-  SetFrameTo(frame_, 1000);
-  AudioFrame initial_frame;
-  initial_frame.CopyFrom(*frame_);
-  ProcessWithDefaultStreamParameters(frame_);
-  ProcessWithDefaultStreamParameters(frame_);
-  // Verify the processing has actually changed the frame.
-  EXPECT_FALSE(FrameDataAreEqual(*frame_, initial_frame));
-
-  frame_copy.CopyFrom(initial_frame);
-  apm_->Initialize();
-  ProcessWithDefaultStreamParameters(&frame_copy);
-  // Verify an init here would result in different output.
-  apm_->Initialize();
-  ProcessWithDefaultStreamParameters(&frame_copy);
-  EXPECT_FALSE(FrameDataAreEqual(*frame_, frame_copy));
-
-  frame_copy.CopyFrom(initial_frame);
-  apm_->Initialize();
-  ProcessWithDefaultStreamParameters(&frame_copy);
-  // Test that the same value does not trigger an init.
- f(apm_.get(), initial_value); - ProcessWithDefaultStreamParameters(&frame_copy); - EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy)); -} - void ApmTest::ProcessDelayVerificationTest(int delay_ms, int system_delay_ms, int delay_min, int delay_max) { // The |revframe_| and |frame_| should include the proper frame information, @@ -519,10 +476,10 @@ void ApmTest::ProcessDelayVerificationTest(int delay_ms, int system_delay_ms, frame_queue.push(frame); frame_delay--; } - // Run for 4.5 seconds, skipping statistics from the first second. We need - // enough frames with audio to have reliable estimates, but as few as possible - // to keep processing time down. 4.5 seconds seemed to be a good compromise - // for this recording. + // Run for 4.5 seconds, skipping statistics from the first 2.5 seconds. We + // need enough frames with audio to have reliable estimates, but as few as + // possible to keep processing time down. 4.5 seconds seemed to be a good + // compromise for this recording. for (int frame_count = 0; frame_count < 450; ++frame_count) { webrtc::AudioFrame* frame = new AudioFrame(); frame->CopyFrom(tmp_frame); @@ -546,7 +503,7 @@ void ApmTest::ProcessDelayVerificationTest(int delay_ms, int system_delay_ms, frame_queue.pop(); delete frame; - if (frame_count == 100) { + if (frame_count == 250) { int median; int std; // Discard the first delay metrics to avoid convergence effects. @@ -585,7 +542,6 @@ TEST_F(ApmTest, StreamParameters) { apm_->ProcessStream(frame_)); // -- Missing AGC level -- - EXPECT_EQ(apm_->kNoError, apm_->Initialize()); EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true)); EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_)); @@ -608,8 +564,8 @@ TEST_F(ApmTest, StreamParameters) { apm_->echo_cancellation()->enable_drift_compensation(false)); // -- Missing delay -- - EXPECT_EQ(apm_->kNoError, apm_->Initialize()); EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true)); + EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_)); EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_)); // Resets after successful ProcessStream(). @@ -628,7 +584,6 @@ TEST_F(ApmTest, StreamParameters) { EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(false)); // -- Missing drift -- - EXPECT_EQ(apm_->kNoError, apm_->Initialize()); EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_)); // Resets after successful ProcessStream(). 
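// A minimal sketch of the configuration pattern the hunks above and below
// migrate to: the removed set_sample_rate_hz()/set_num_channels()/
// set_num_reverse_channels() setters give way to reading the format off the
// AudioFrame itself (SetFrameSampleRate() is the helper from the new
// test_utils.h added later in this patch):
//
//   AudioFrame frame;
//   SetFrameSampleRate(&frame, 16000);  // sets rate and the 10 ms chunk size
//   frame.num_channels_ = 2;
//   // The first ProcessStream() call adopts the frame's format; it may
//   // legitimately return kStreamParameterNotSetError, as in Init() above.
//   int err = apm->ProcessStream(&frame);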
@@ -645,14 +600,12 @@ TEST_F(ApmTest, StreamParameters) { EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_)); // -- No stream parameters -- - EXPECT_EQ(apm_->kNoError, apm_->Initialize()); EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_)); EXPECT_EQ(apm_->kStreamParameterNotSetError, apm_->ProcessStream(frame_)); // -- All there -- - EXPECT_EQ(apm_->kNoError, apm_->Initialize()); EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100)); apm_->echo_cancellation()->set_stream_drift_samples(0); EXPECT_EQ(apm_->kNoError, @@ -684,65 +637,38 @@ TEST_F(ApmTest, DelayOffsetWithLimitsIsSetProperly) { EXPECT_EQ(50, apm_->stream_delay_ms()); } +void ApmTest::TestChangingChannels(int num_channels, + AudioProcessing::Error expected_return) { + frame_->num_channels_ = num_channels; + EXPECT_EQ(expected_return, apm_->ProcessStream(frame_)); + EXPECT_EQ(expected_return, apm_->AnalyzeReverseStream(frame_)); +} + TEST_F(ApmTest, Channels) { - // Testing number of invalid channels - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(0, 1)); - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(1, 0)); - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(3, 1)); - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(1, 3)); - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_reverse_channels(0)); - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_reverse_channels(3)); - // Testing number of valid channels + // Testing number of invalid channels. + TestChangingChannels(0, apm_->kBadNumberChannelsError); + TestChangingChannels(3, apm_->kBadNumberChannelsError); + // Testing number of valid channels. for (int i = 1; i < 3; i++) { - for (int j = 1; j < 3; j++) { - if (j > i) { - EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(i, j)); - } else { - EXPECT_EQ(apm_->kNoError, apm_->set_num_channels(i, j)); - EXPECT_EQ(j, apm_->num_output_channels()); - } - } + TestChangingChannels(i, kNoErr); EXPECT_EQ(i, apm_->num_input_channels()); - EXPECT_EQ(apm_->kNoError, apm_->set_num_reverse_channels(i)); EXPECT_EQ(i, apm_->num_reverse_channels()); } } TEST_F(ApmTest, SampleRates) { // Testing invalid sample rates - EXPECT_EQ(apm_->kBadParameterError, apm_->set_sample_rate_hz(10000)); + SetFrameSampleRate(frame_, 10000); + EXPECT_EQ(apm_->kBadSampleRateError, apm_->ProcessStream(frame_)); // Testing valid sample rates int fs[] = {8000, 16000, 32000}; for (size_t i = 0; i < sizeof(fs) / sizeof(*fs); i++) { - EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(fs[i])); + SetFrameSampleRate(frame_, fs[i]); + EXPECT_EQ(kNoErr, apm_->ProcessStream(frame_)); EXPECT_EQ(fs[i], apm_->sample_rate_hz()); } } -void SetSampleRate(AudioProcessing* ap, int value) { - EXPECT_EQ(ap->kNoError, ap->set_sample_rate_hz(value)); -} - -void SetNumReverseChannels(AudioProcessing* ap, int value) { - EXPECT_EQ(ap->kNoError, ap->set_num_reverse_channels(value)); -} - -void SetNumOutputChannels(AudioProcessing* ap, int value) { - EXPECT_EQ(ap->kNoError, ap->set_num_channels(2, value)); -} - -TEST_F(ApmTest, SampleRateChangeTriggersInit) { - ChangeTriggersInit(SetSampleRate, apm_.get(), 16000, 8000); -} - -TEST_F(ApmTest, ReverseChannelChangeTriggersInit) { - ChangeTriggersInit(SetNumReverseChannels, apm_.get(), 2, 1); -} - -TEST_F(ApmTest, ChannelChangeTriggersInit) { - ChangeTriggersInit(SetNumOutputChannels, apm_.get(), 2, 1); -} - TEST_F(ApmTest, EchoCancellation) { EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->enable_drift_compensation(true)); @@ -844,7 +770,7 @@ 
TEST_F(ApmTest, EchoCancellationReportsCorrectDelays) {
     Init(kProcessSampleRates[i], 2, 2, 2, false);
     // Sampling frequency dependent variables.
     const int num_ms_per_block = std::max(4,
-        640 / frame_->samples_per_channel_);
+                                          640 / frame_->samples_per_channel_);
     const int delay_min_ms = -kLookaheadBlocks * num_ms_per_block;
     const int delay_max_ms = (kMaxDelayBlocks - 1) * num_ms_per_block;
@@ -882,13 +808,16 @@ TEST_F(ApmTest, EchoCancellationReportsCorrectDelays) {
 
 TEST_F(ApmTest, EchoControlMobile) {
   // AECM won't use super-wideband.
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
+  SetFrameSampleRate(frame_, 32000);
+  EXPECT_EQ(kNoErr, apm_->ProcessStream(frame_));
   EXPECT_EQ(apm_->kBadSampleRateError,
             apm_->echo_control_mobile()->Enable(true));
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
+  SetFrameSampleRate(frame_, 16000);
+  EXPECT_EQ(kNoErr, apm_->ProcessStream(frame_));
   EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
-  EXPECT_EQ(apm_->kUnsupportedComponentError, apm_->set_sample_rate_hz(32000));
+  SetFrameSampleRate(frame_, 32000);
+  EXPECT_EQ(apm_->kUnsupportedComponentError, apm_->ProcessStream(frame_));
 
   // Turn AECM on (and AEC off)
   Init(16000, 2, 2, 2, false);
@@ -1094,7 +1023,6 @@ TEST_F(ApmTest, LevelEstimator) {
 
   // Run this test in wideband; in super-wb, the splitting filter distorts the
   // audio enough to cause deviation from the expectation for small values.
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
   frame_->samples_per_channel_ = 160;
   frame_->num_channels_ = 2;
   frame_->sample_rate_hz_ = 16000;
@@ -1220,19 +1148,6 @@ TEST_F(ApmTest, VoiceDetection) {
   // TODO(bjornv): Add tests for streamed voice; stream_has_voice()
 }
 
-TEST_F(ApmTest, VerifyDownMixing) {
-  for (size_t i = 0; i < kSampleRatesSize; i++) {
-    Init(kSampleRates[i], 2, 2, 1, false);
-    SetFrameTo(frame_, 1000, 2000);
-    AudioFrame mono_frame;
-    mono_frame.samples_per_channel_ = frame_->samples_per_channel_;
-    mono_frame.num_channels_ = 1;
-    SetFrameTo(&mono_frame, 1500);
-    EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
-    EXPECT_TRUE(FrameDataAreEqual(*frame_, mono_frame));
-  }
-}
-
 TEST_F(ApmTest, AllProcessingDisabledByDefault) {
   EXPECT_FALSE(apm_->echo_cancellation()->is_enabled());
   EXPECT_FALSE(apm_->echo_control_mobile()->is_enabled());
@@ -1328,7 +1243,6 @@ TEST_F(ApmTest, SplittingFilter) {
   EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(false));
 
   // 5. Not using super-wb.
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
   frame_->samples_per_channel_ = 160;
   frame_->num_channels_ = 2;
   frame_->sample_rate_hz_ = 16000;
@@ -1349,7 +1263,6 @@ TEST_F(ApmTest, SplittingFilter) {
 
   // Check the test is valid. We should have distortion from the filter
   // when AEC is enabled (which won't affect the audio).
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
   frame_->samples_per_channel_ = 320;
   frame_->num_channels_ = 2;
   frame_->sample_rate_hz_ = 32000;
@@ -1364,15 +1277,16 @@
 // TODO(andrew): expand test to verify output.
 TEST_F(ApmTest, DebugDump) {
   const std::string filename = webrtc::test::OutputPath() + "debug.aec";
-  EXPECT_EQ(apm_->kNullPointerError, apm_->StartDebugRecording(NULL));
+  EXPECT_EQ(apm_->kNullPointerError,
+            apm_->StartDebugRecording(static_cast<const char*>(NULL)));
 
 #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
   // Stopping without having started should be OK.
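// The static_cast in the NULL check above disambiguates between the two
// StartDebugRecording() overloads this patch exercises; a sketch of both
// entry points, as used here and in DebugDumpFromFileHandle below:
//   apm_->StartDebugRecording(filename.c_str());  // by file name
//   FILE* handle = fopen(filename.c_str(), "w");
//   apm_->StartDebugRecording(handle);            // by open file handle
//   apm_->StopDebugRecording();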
EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording()); EXPECT_EQ(apm_->kNoError, apm_->StartDebugRecording(filename.c_str())); - EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_)); EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_)); + EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_)); EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording()); // Verify the file has been written. @@ -1392,6 +1306,39 @@ TEST_F(ApmTest, DebugDump) { #endif // WEBRTC_AUDIOPROC_DEBUG_DUMP } +// TODO(andrew): expand test to verify output. +TEST_F(ApmTest, DebugDumpFromFileHandle) { + FILE* fid = NULL; + EXPECT_EQ(apm_->kNullPointerError, apm_->StartDebugRecording(fid)); + const std::string filename = webrtc::test::OutputPath() + "debug.aec"; + fid = fopen(filename.c_str(), "w"); + ASSERT_TRUE(fid); + +#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP + // Stopping without having started should be OK. + EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording()); + + EXPECT_EQ(apm_->kNoError, apm_->StartDebugRecording(fid)); + EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_)); + EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_)); + EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording()); + + // Verify the file has been written. + fid = fopen(filename.c_str(), "r"); + ASSERT_TRUE(fid != NULL); + + // Clean it up. + ASSERT_EQ(0, fclose(fid)); + ASSERT_EQ(0, remove(filename.c_str())); +#else + EXPECT_EQ(apm_->kUnsupportedFunctionError, + apm_->StartDebugRecording(fid)); + EXPECT_EQ(apm_->kUnsupportedFunctionError, apm_->StopDebugRecording()); + + ASSERT_EQ(0, fclose(fid)); +#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP +} + // TODO(andrew): Add a test to process a few frames with different combinations // of enabled components. @@ -1408,15 +1355,12 @@ TEST_F(ApmTest, DISABLED_ON_ANDROID(Process)) { // Write the desired tests to the protobuf reference file. for (size_t i = 0; i < kChannelsSize; i++) { for (size_t j = 0; j < kChannelsSize; j++) { - // We can't have more output than input channels. - for (size_t k = 0; k <= j; k++) { - for (size_t l = 0; l < kProcessSampleRatesSize; l++) { - webrtc::audioproc::Test* test = ref_data.add_test(); - test->set_num_reverse_channels(kChannels[i]); - test->set_num_input_channels(kChannels[j]); - test->set_num_output_channels(kChannels[k]); - test->set_sample_rate(kProcessSampleRates[l]); - } + for (size_t l = 0; l < kProcessSampleRatesSize; l++) { + webrtc::audioproc::Test* test = ref_data.add_test(); + test->set_num_reverse_channels(kChannels[i]); + test->set_num_input_channels(kChannels[j]); + test->set_num_output_channels(kChannels[j]); + test->set_sample_rate(kProcessSampleRates[l]); } } } @@ -1428,6 +1372,11 @@ TEST_F(ApmTest, DISABLED_ON_ANDROID(Process)) { printf("Running test %d of %d...\n", i + 1, ref_data.test_size()); webrtc::audioproc::Test* test = ref_data.mutable_test(i); + // TODO(ajm): We no longer allow different input and output channels. Skip + // these tests for now, but they should be removed from the set. 
+    if (test->num_input_channels() != test->num_output_channels())
+      continue;
+
     Init(test->sample_rate(), test->num_reverse_channels(),
          test->num_input_channels(), test->num_output_channels(), true);
diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/test/process_test.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/test/process_test.cc
index cab93fea76e9..8a5d0f5fd661 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_processing/test/process_test.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_processing/test/process_test.cc
@@ -17,6 +17,7 @@
 
 #include
 
+#include "webrtc/common.h"
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
@@ -34,6 +35,8 @@
 using webrtc::AudioFrame;
 using webrtc::AudioProcessing;
+using webrtc::Config;
+using webrtc::DelayCorrection;
 using webrtc::EchoCancellation;
 using webrtc::GainControl;
 using webrtc::NoiseSuppression;
@@ -106,6 +109,7 @@ void usage() {
   printf("  --no_echo_metrics\n");
   printf("  --no_delay_logging\n");
   printf("  --aec_suppression_level LEVEL  [0 - 2]\n");
+  printf("  --extended_filter\n");
   printf("\n  -aecm     Echo control mobile\n");
   printf("  --aecm_echo_path_in_file FILE\n");
   printf("  --aecm_echo_path_out_file FILE\n");
@@ -133,7 +137,8 @@ void usage() {
   printf("\n");
   printf("Modifiers:\n");
   printf("  --noasm            Disable SSE optimization.\n");
-  printf("  --delay DELAY      Add DELAY ms to input value.\n");
+  printf("  --add_delay DELAY  Add DELAY ms to input value.\n");
+  printf("  --delay DELAY      Override input delay with DELAY ms.\n");
   printf("  --perf             Measure performance.\n");
   printf("  --quiet            Suppress text output.\n");
   printf("  --no_progress      Suppress progress.\n");
@@ -194,6 +199,7 @@ void void_main(int argc, char* argv[]) {
   bool verbose = true;
   bool progress = true;
   int extra_delay_ms = 0;
+  int override_delay_ms = 0;
   //bool interleaved = true;
 
   ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
@@ -226,9 +232,6 @@ void void_main(int argc, char* argv[]) {
       ASSERT_EQ(1, sscanf(argv[i], "%d", &sample_rate_hz));
       samples_per_channel = sample_rate_hz / 100;
 
-      ASSERT_EQ(apm->kNoError,
-                apm->set_sample_rate_hz(sample_rate_hz));
-
     } else if (strcmp(argv[i], "-ch") == 0) {
       i++;
       ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
@@ -236,18 +239,11 @@
       i++;
       ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));
 
-      ASSERT_EQ(apm->kNoError,
-                apm->set_num_channels(num_capture_input_channels,
-                                      num_capture_output_channels));
-
     } else if (strcmp(argv[i], "-rch") == 0) {
       i++;
       ASSERT_LT(i, argc) << "Specify number of channels after -rch";
       ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));
 
-      ASSERT_EQ(apm->kNoError,
-                apm->set_num_reverse_channels(num_render_channels));
-
     } else if (strcmp(argv[i], "-aec") == 0) {
       ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
       ASSERT_EQ(apm->kNoError,
@@ -289,6 +285,11 @@ void void_main(int argc, char* argv[]) {
                     static_cast<EchoCancellation::SuppressionLevel>(
                         suppression_level)));
 
+    } else if (strcmp(argv[i], "--extended_filter") == 0) {
+      Config config;
+      config.Set<DelayCorrection>(new DelayCorrection(true));
+      apm->SetExtraOptions(config);
+
     } else if (strcmp(argv[i], "-aecm") == 0) {
       ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));
 
@@ -430,10 +431,14 @@ void void_main(int argc, char* argv[]) {
       // We need to reinitialize here if components have already been enabled.
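// A minimal usage sketch of the Config plumbing behind the new
// --extended_filter flag handled above (it mirrors the option parser; no API
// beyond what the patch itself uses is assumed):
//   webrtc::Config config;
//   config.Set<webrtc::DelayCorrection>(new webrtc::DelayCorrection(true));
//   apm->SetExtraOptions(config);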
ASSERT_EQ(apm->kNoError, apm->Initialize()); - } else if (strcmp(argv[i], "--delay") == 0) { + } else if (strcmp(argv[i], "--add_delay") == 0) { i++; ASSERT_EQ(1, sscanf(argv[i], "%d", &extra_delay_ms)); + } else if (strcmp(argv[i], "--delay") == 0) { + i++; + ASSERT_EQ(1, sscanf(argv[i], "%d", &override_delay_ms)); + } else if (strcmp(argv[i], "--perf") == 0) { perf_testing = true; @@ -622,9 +627,9 @@ void void_main(int argc, char* argv[]) { const Init msg = event_msg.init(); ASSERT_TRUE(msg.has_sample_rate()); - ASSERT_EQ(apm->kNoError, - apm->set_sample_rate_hz(msg.sample_rate())); - + // TODO(bjornv): Replace set_sample_rate_hz() when we have a smarter + // AnalyzeReverseStream(). + ASSERT_EQ(apm->kNoError, apm->set_sample_rate_hz(msg.sample_rate())); ASSERT_TRUE(msg.has_device_sample_rate()); ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->set_device_sample_rate_hz( @@ -632,13 +637,7 @@ void void_main(int argc, char* argv[]) { ASSERT_TRUE(msg.has_num_input_channels()); ASSERT_TRUE(msg.has_num_output_channels()); - ASSERT_EQ(apm->kNoError, - apm->set_num_channels(msg.num_input_channels(), - msg.num_output_channels())); - ASSERT_TRUE(msg.has_num_reverse_channels()); - ASSERT_EQ(apm->kNoError, - apm->set_num_reverse_channels(msg.num_reverse_channels())); samples_per_channel = msg.sample_rate() / 100; far_frame.sample_rate_hz_ = msg.sample_rate(); @@ -715,8 +714,12 @@ void void_main(int argc, char* argv[]) { ASSERT_EQ(apm->kNoError, apm->gain_control()->set_stream_analog_level(msg.level())); + delay_ms = msg.delay() + extra_delay_ms; + if (override_delay_ms) { + delay_ms = override_delay_ms; + } ASSERT_EQ(apm->kNoError, - apm->set_stream_delay_ms(msg.delay() + extra_delay_ms)); + apm->set_stream_delay_ms(delay_ms)); apm->echo_cancellation()->set_stream_drift_samples(msg.drift()); int err = apm->ProcessStream(&near_frame); @@ -814,8 +817,9 @@ void void_main(int argc, char* argv[]) { 1, event_file)); - ASSERT_EQ(apm->kNoError, - apm->set_sample_rate_hz(sample_rate_hz)); + // TODO(bjornv): Replace set_sample_rate_hz() when we have a smarter + // AnalyzeReverseStream(). + ASSERT_EQ(apm->kNoError, apm->set_sample_rate_hz(sample_rate_hz)); ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->set_device_sample_rate_hz( @@ -918,8 +922,12 @@ void void_main(int argc, char* argv[]) { const int capture_level_in = capture_level; ASSERT_EQ(apm->kNoError, apm->gain_control()->set_stream_analog_level(capture_level)); + delay_ms += extra_delay_ms; + if (override_delay_ms) { + delay_ms = override_delay_ms; + } ASSERT_EQ(apm->kNoError, - apm->set_stream_delay_ms(delay_ms + extra_delay_ms)); + apm->set_stream_delay_ms(delay_ms)); apm->echo_cancellation()->set_stream_drift_samples(drift_samples); int err = apm->ProcessStream(&near_frame); diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/test/test_utils.h b/media/webrtc/trunk/webrtc/modules/audio_processing/test/test_utils.h new file mode 100644 index 000000000000..452d84360fc4 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/test/test_utils.h @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/audio_processing/include/audio_processing.h" +#include "webrtc/modules/interface/module_common_types.h" + +static const int kChunkSizeMs = 10; +static const webrtc::AudioProcessing::Error kNoErr = + webrtc::AudioProcessing::kNoError; + +static void SetFrameSampleRate(webrtc::AudioFrame* frame, int sample_rate_hz) { + frame->sample_rate_hz_ = sample_rate_hz; + frame->samples_per_channel_ = kChunkSizeMs * sample_rate_hz / 1000; +} diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/test/unpack.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/test/unpack.cc index 0740143bebcf..f8a4bb17ad53 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/test/unpack.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/test/unpack.cc @@ -36,6 +36,7 @@ DEFINE_string(reverse_file, "reverse.pcm", DEFINE_string(delay_file, "delay.int32", "The name of the delay file."); DEFINE_string(drift_file, "drift.int32", "The name of the drift file."); DEFINE_string(level_file, "level.int32", "The name of the level file."); +DEFINE_string(keypress_file, "keypress.bool", "The name of the keypress file."); DEFINE_string(settings_file, "settings.txt", "The name of the settings file."); DEFINE_bool(full, false, "Unpack the full set of files (normally not needed)."); @@ -105,6 +106,7 @@ int main(int argc, char* argv[]) { FILE* delay_file = NULL; FILE* drift_file = NULL; FILE* level_file = NULL; + FILE* keypress_file = NULL; if (FLAGS_full) { delay_file = fopen(FLAGS_delay_file.c_str(), "wb"); if (delay_file == NULL) { @@ -121,6 +123,11 @@ int main(int argc, char* argv[]) { printf("Unable to open %s\n", FLAGS_level_file.c_str()); return 1; } + keypress_file = fopen(FLAGS_keypress_file.c_str(), "wb"); + if (keypress_file == NULL) { + printf("Unable to open %s\n", FLAGS_keypress_file.c_str()); + return 1; + } } Event event_msg; @@ -188,6 +195,14 @@ int main(int argc, char* argv[]) { return 1; } } + + if (msg.has_keypress()) { + bool keypress = msg.keypress(); + if (fwrite(&keypress, sizeof(bool), 1, keypress_file) != 1) { + printf("Error when writing to %s\n", FLAGS_keypress_file.c_str()); + return 1; + } + } } } else if (event_msg.type() == Event::INIT) { if (!event_msg.has_init()) { diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/typing_detection.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/typing_detection.cc new file mode 100644 index 000000000000..5f5ce0abafd5 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/typing_detection.cc @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "webrtc/modules/audio_processing/typing_detection.h"
+
+namespace webrtc {
+
+TypingDetection::TypingDetection()
+    : time_active_(0),
+      time_since_last_typing_(0),
+      penalty_counter_(0),
+      counter_since_last_detection_update_(0),
+      detection_to_report_(false),
+      new_detection_to_report_(false),
+      time_window_(10),
+      cost_per_typing_(100),
+      reporting_threshold_(300),
+      penalty_decay_(1),
+      type_event_delay_(2),
+      report_detection_update_period_(1) {
+}
+
+TypingDetection::~TypingDetection() {}
+
+bool TypingDetection::Process(bool key_pressed, bool vad_activity) {
+  if (vad_activity)
+    time_active_++;
+  else
+    time_active_ = 0;
+
+  // Keep track of the time since the last typing event.
+  if (key_pressed)
+    time_since_last_typing_ = 0;
+  else
+    ++time_since_last_typing_;
+
+  if (time_since_last_typing_ < type_event_delay_ &&
+      vad_activity &&
+      time_active_ < time_window_) {
+    penalty_counter_ += cost_per_typing_;
+    if (penalty_counter_ > reporting_threshold_)
+      new_detection_to_report_ = true;
+  }
+
+  if (penalty_counter_ > 0)
+    penalty_counter_ -= penalty_decay_;
+
+  if (++counter_since_last_detection_update_ ==
+      report_detection_update_period_) {
+    detection_to_report_ = new_detection_to_report_;
+    new_detection_to_report_ = false;
+    counter_since_last_detection_update_ = 0;
+  }
+
+  return detection_to_report_;
+}
+
+int TypingDetection::TimeSinceLastDetectionInSeconds() {
+  // Round to whole seconds.
+  return (time_since_last_typing_ + 50) / 100;
+}
+
+void TypingDetection::SetParameters(int time_window,
+                                    int cost_per_typing,
+                                    int reporting_threshold,
+                                    int penalty_decay,
+                                    int type_event_delay,
+                                    int report_detection_update_period) {
+  if (time_window) time_window_ = time_window;
+
+  if (cost_per_typing) cost_per_typing_ = cost_per_typing;
+
+  if (reporting_threshold) reporting_threshold_ = reporting_threshold;
+
+  if (penalty_decay) penalty_decay_ = penalty_decay;
+
+  if (type_event_delay) type_event_delay_ = type_event_delay;
+
+  if (report_detection_update_period)
+    report_detection_update_period_ = report_detection_update_period;
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/typing_detection.h b/media/webrtc/trunk/webrtc/modules/audio_processing/typing_detection.h
new file mode 100644
index 000000000000..5fa6456e9e93
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/audio_processing/typing_detection.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
+
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class TypingDetection {
+ public:
+  TypingDetection();
+  virtual ~TypingDetection();
+
+  // Run the detection algorithm. Shall be called every 10 ms. Returns true if
+  // typing is detected, or false if not, based on the update period as set
+  // with SetParameters(). See |report_detection_update_period_| description
+  // below.
+  bool Process(bool key_pressed, bool vad_activity);
+
+  // Gets the time in seconds since the last detection.
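  // (A typical driving loop for Process(), per the contract above; an
  // illustrative sketch only, where GetNext10MsFrame(), KeyPressedDuring()
  // and MuteOutput() are hypothetical consumer-side helpers:
  //   TypingDetection td;
  //   while (GetNext10MsFrame(&frame)) {  // called once per 10 ms slot
  //     bool typing = td.Process(KeyPressedDuring(frame),
  //                              vad_reports_voice_activity);
  //     if (typing) MuteOutput();
  //   }
  // )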
+  int TimeSinceLastDetectionInSeconds();
+
+  // Sets the algorithm parameters. A parameter value of 0 leaves it unchanged.
+  // See the corresponding member variables below for descriptions.
+  void SetParameters(int time_window,
+                     int cost_per_typing,
+                     int reporting_threshold,
+                     int penalty_decay,
+                     int type_event_delay,
+                     int report_detection_update_period);
+
+ private:
+  int time_active_;
+  int time_since_last_typing_;
+  int penalty_counter_;
+
+  // Counter since last time the detection status reported by Process() was
+  // updated. See also |report_detection_update_period_|.
+  int counter_since_last_detection_update_;
+
+  // The detection status to report. Updated every
+  // |report_detection_update_period_| call to Process().
+  bool detection_to_report_;
+
+  // What |detection_to_report_| should be set to next time it is updated.
+  bool new_detection_to_report_;
+
+  // Settable threshold values.
+
+  // Number of 10 ms slots accepted to count as a hit.
+  int time_window_;
+
+  // Penalty added when a typing event and voice activity coincide.
+  int cost_per_typing_;
+
+  // Threshold for |penalty_counter_|.
+  int reporting_threshold_;
+
+  // How much we reduce |penalty_counter_| every 10 ms.
+  int penalty_decay_;
+
+  // How old a typing event may be and still count.
+  int type_event_delay_;
+
+  // Settable update period.
+
+  // Number of 10 ms slots between each update of the detection status returned
+  // by Process(). This inertia added to the algorithm is usually desirable and
+  // provided so that consumers of the class don't have to implement that
+  // themselves if they don't wish.
+  // If set to 1, each call to Process() will return the detection status for
+  // that 10 ms slot.
+  // If set to N (where N > 1), the detection status returned from Process()
+  // will remain the same until Process() has been called N times. Then, if
+  // none of the last N calls to Process() detected typing in its respective
+  // 10 ms slot, Process() will return false. If at least one of the last N
+  // calls has detected typing, Process() will return true. That returned
+  // status then remains the same until the next N calls have been made.
+  int report_detection_update_period_;
+};
+
+}  // namespace webrtc
+
+#endif  // #ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.c b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.c
index 91c1e9b40f73..c67e174a0a72 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.c
+++ b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.c
@@ -23,6 +23,18 @@
 static const int32_t kProbabilityOffset = 1024;  // 2 in Q9.
 static const int32_t kProbabilityLowerLimit = 8704;  // 17 in Q9.
 static const int32_t kProbabilityMinSpread = 2816;  // 5.5 in Q9.
 
+// Robust validation settings
+static const float kHistogramMax = 3000.f;
+static const float kLastHistogramMax = 250.f;
+static const float kMinHistogramThreshold = 1.5f;
+static const int kMinRequiredHits = 10;
+static const int kMaxHitsWhenPossiblyNonCausal = 10;
+static const int kMaxHitsWhenPossiblyCausal = 1000;
+static const float kQ14Scaling = 1.f / (1 << 14);  // Scaling by 2^14 to get Q0.
+static const float kFractionSlope = 0.05f;
+static const float kMinFractionWhenPossiblyCausal = 0.5f;
+static const float kMinFractionWhenPossiblyNonCausal = 0.25f;
+
 // Counts and returns number of bits of a 32-bit word.
static int BitCount(uint32_t u32) { uint32_t tmp = u32 - ((u32 >> 1) & 033333333333) - @@ -59,6 +71,189 @@ static void BitCountComparison(uint32_t binary_vector, } } +// Collects necessary statistics for the HistogramBasedValidation(). This +// function has to be called prior to calling HistogramBasedValidation(). The +// statistics updated and used by the HistogramBasedValidation() are: +// 1. the number of |candidate_hits|, which states for how long we have had the +// same |candidate_delay| +// 2. the |histogram| of candidate delays over time. This histogram is +// weighted with respect to a reliability measure and time-varying to cope +// with possible delay shifts. +// For further description see commented code. +// +// Inputs: +// - candidate_delay : The delay to validate. +// - valley_depth_q14 : The cost function has a valley/minimum at the +// |candidate_delay| location. |valley_depth_q14| is the +// cost function difference between the minimum and +// maximum locations. The value is in the Q14 domain. +// - valley_level_q14 : Is the cost function value at the minimum, in Q14. +static void UpdateRobustValidationStatistics(BinaryDelayEstimator* self, + int candidate_delay, + int32_t valley_depth_q14, + int32_t valley_level_q14) { + const float valley_depth = valley_depth_q14 * kQ14Scaling; + float decrease_in_last_set = valley_depth; + const int max_hits_for_slow_change = (candidate_delay < self->last_delay) ? + kMaxHitsWhenPossiblyNonCausal : kMaxHitsWhenPossiblyCausal; + int i = 0; + + // Reset |candidate_hits| if we have a new candidate. + if (candidate_delay != self->last_candidate_delay) { + self->candidate_hits = 0; + self->last_candidate_delay = candidate_delay; + } + self->candidate_hits++; + + // The |histogram| is updated differently across the bins. + // 1. The |candidate_delay| histogram bin is increased with the + // |valley_depth|, which is a simple measure of how reliable the + // |candidate_delay| is. The histogram is not increased above + // |kHistogramMax|. + self->histogram[candidate_delay] += valley_depth; + if (self->histogram[candidate_delay] > kHistogramMax) { + self->histogram[candidate_delay] = kHistogramMax; + } + // 2. The histogram bins in the neighborhood of |candidate_delay| are + // unaffected. The neighborhood is defined as x + {-2, -1, 0, 1}. + // 3. The histogram bins in the neighborhood of |last_delay| are decreased + // with |decrease_in_last_set|. This value equals the difference between + // the cost function values at the locations |candidate_delay| and + // |last_delay| until we reach |max_hits_for_slow_change| consecutive hits + // at the |candidate_delay|. If we exceed this amount of hits the + // |candidate_delay| is a "potential" candidate and we start decreasing + // these histogram bins more rapidly with |valley_depth|. + if (self->candidate_hits < max_hits_for_slow_change) { + decrease_in_last_set = (self->mean_bit_counts[self->compare_delay] - + valley_level_q14) * kQ14Scaling; + } + // 4. All other bins are decreased with |valley_depth|. + // TODO(bjornv): Investigate how to make this loop more efficient. Split up + // the loop? Remove parts that doesn't add too much. 
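// (Worked example of update rules 1-5 above, with illustrative numbers:
// take valley_depth = 0.8, candidate_delay = 30, last_delay = 20. Then:
//   rule 1: histogram[30] += 0.8, saturating at kHistogramMax;
//   rule 2: histogram[28..31], the candidate neighborhood, is otherwise
//           left untouched;
//   rule 3: histogram[18..21], the last-delay set, -= decrease_in_last_set;
//   rule 4: every remaining bin -= 0.8, via the loop below;
//   rule 5: any bin that would drop below 0 is clamped to 0.)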
+  for (i = 0; i < self->farend->history_size; ++i) {
+    int is_in_last_set = (i >= self->last_delay - 2) &&
+        (i <= self->last_delay + 1) && (i != candidate_delay);
+    int is_in_candidate_set = (i >= candidate_delay - 2) &&
+        (i <= candidate_delay + 1);
+    self->histogram[i] -= decrease_in_last_set * is_in_last_set +
+        valley_depth * (!is_in_last_set && !is_in_candidate_set);
+    // 5. No histogram bin can go below 0.
+    if (self->histogram[i] < 0) {
+      self->histogram[i] = 0;
+    }
+  }
+}
+
+// Validates the |candidate_delay|, estimated in WebRtc_ProcessBinarySpectrum(),
+// based on a mix of counting concurring hits with a modified histogram
+// of recent delay estimates. In brief a candidate is valid (returns 1) if it
+// is the most likely according to the histogram. There are a couple of
+// exceptions that are worth mentioning:
+// 1. If the |candidate_delay| < |last_delay| it can be that we are in a
+//    non-causal state, breaking a possible echo control algorithm. Hence, we
+//    open up for a quicker change by allowing the change even if the
+//    |candidate_delay| is not the most likely one according to the histogram.
+// 2. There's a minimum number of hits (kMinRequiredHits) and the histogram
+//    value has to have reached a minimum (kMinHistogramThreshold) to be valid.
+// 3. The action also depends on the filter length used for echo control.
+//    If the delay difference is larger than what the filter can capture, we
+//    also move quicker towards a change.
+// For further description see commented code.
+//
+// Input:
+//    - candidate_delay     : The delay to validate.
+//
+// Return value:
+//    - is_histogram_valid  : 1 - The |candidate_delay| is valid.
+//                            0 - Otherwise.
+static int HistogramBasedValidation(const BinaryDelayEstimator* self,
+                                    int candidate_delay) {
+  float fraction = 1.f;
+  float histogram_threshold = self->histogram[self->compare_delay];
+  const int delay_difference = candidate_delay - self->last_delay;
+  int is_histogram_valid = 0;
+
+  // The histogram based validation of |candidate_delay| is done by comparing
+  // the |histogram| at bin |candidate_delay| with a |histogram_threshold|.
+  // This |histogram_threshold| equals a |fraction| of the |histogram| at bin
+  // |last_delay|. The |fraction| is a piecewise linear function of the
+  // |delay_difference| between the |candidate_delay| and the |last_delay|
+  // allowing for a quicker move if
+  //   i) a potential echo control filter cannot handle these large
+  //      differences.
+  //  ii) keeping |last_delay| instead of updating to |candidate_delay| could
+  //      force an echo control into a non-causal state.
+  // We further require the histogram to have reached a minimum value of
+  // |kMinHistogramThreshold|. In addition, we also require the number of
+  // |candidate_hits| to be more than |kMinRequiredHits| to remove spurious
+  // values.
+
+  // Calculate a comparison histogram value (|histogram_threshold|) that
+  // depends on the distance between the |candidate_delay| and |last_delay|.
+  // TODO(bjornv): How much can we gain by turning the fraction calculation
+  // into tables?
+  if (delay_difference > self->allowed_offset) {
+    fraction = 1.f - kFractionSlope * (delay_difference - self->allowed_offset);
+    fraction = (fraction > kMinFractionWhenPossiblyCausal ? fraction :
+        kMinFractionWhenPossiblyCausal);
+  } else if (delay_difference < 0) {
+    fraction = kMinFractionWhenPossiblyNonCausal -
+        kFractionSlope * delay_difference;
+    fraction = (fraction > 1.f ?
1.f : fraction); + } + histogram_threshold *= fraction; + histogram_threshold = (histogram_threshold > kMinHistogramThreshold ? + histogram_threshold : kMinHistogramThreshold); + + is_histogram_valid = + (self->histogram[candidate_delay] >= histogram_threshold) && + (self->candidate_hits > kMinRequiredHits); + + return is_histogram_valid; +} + +// Performs a robust validation of the |candidate_delay| estimated in +// WebRtc_ProcessBinarySpectrum(). The algorithm takes the +// |is_instantaneous_valid| and the |is_histogram_valid| and combines them +// into a robust validation. The HistogramBasedValidation() has to be called +// prior to this call. +// For further description on how the combination is done, see commented code. +// +// Inputs: +// - candidate_delay : The delay to validate. +// - is_instantaneous_valid : The instantaneous validation performed in +// WebRtc_ProcessBinarySpectrum(). +// - is_histogram_valid : The histogram based validation. +// +// Return value: +// - is_robust : 1 - The candidate_delay is valid according to a +// combination of the two inputs. +// : 0 - Otherwise. +static int RobustValidation(const BinaryDelayEstimator* self, + int candidate_delay, + int is_instantaneous_valid, + int is_histogram_valid) { + int is_robust = 0; + + // The final robust validation is based on the two algorithms; 1) the + // |is_instantaneous_valid| and 2) the histogram based with result stored in + // |is_histogram_valid|. + // i) Before we actually have a valid estimate (|last_delay| == -2), we say + // a candidate is valid if either algorithm states so + // (|is_instantaneous_valid| OR |is_histogram_valid|). + is_robust = (self->last_delay < 0) && + (is_instantaneous_valid || is_histogram_valid); + // ii) Otherwise, we need both algorithms to be certain + // (|is_instantaneous_valid| AND |is_histogram_valid|) + is_robust |= is_instantaneous_valid && is_histogram_valid; + // iii) With one exception, i.e., the histogram based algorithm can overrule + // the instantaneous one if |is_histogram_valid| = 1 and the histogram + // is significantly strong. + is_robust |= is_histogram_valid && + (self->histogram[candidate_delay] > self->last_delay_histogram); + + return is_robust; +} + void WebRtc_FreeBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self) { if (self == NULL) { @@ -139,6 +334,9 @@ void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self) { free(self->binary_near_history); self->binary_near_history = NULL; + free(self->histogram); + self->histogram = NULL; + // BinaryDelayEstimator does not have ownership of |farend|, hence we do not // free the memory here. That should be handled separately by the user. self->farend = NULL; @@ -160,9 +358,14 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator( self->farend = farend; self->near_history_size = lookahead + 1; + self->robust_validation_enabled = 0; // Disabled by default. + self->allowed_offset = 0; - // Allocate memory for spectrum buffers. - self->mean_bit_counts = malloc(farend->history_size * sizeof(int32_t)); + // Allocate memory for spectrum buffers. The extra array element in + // |mean_bit_counts| and |histogram| is a dummy element only used while + // |last_delay| == -2, i.e., before we have a valid estimate. 
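// (Numeric check of the piecewise-linear |fraction| in
// HistogramBasedValidation() above, assuming allowed_offset = 0 and the
// constants kFractionSlope = 0.05 with causal/non-causal minima 0.5/0.25:
//   delay_difference = +4  -> fraction = 1.0 - 0.05 * 4 = 0.80
//   delay_difference = +15 -> 0.25, clamped up to the causal minimum 0.50
//   delay_difference = -1  -> fraction = 0.25 + 0.05 = 0.30
//   delay_difference = -20 -> 1.25, clamped down to 1.00
//   0 <= delay_difference <= allowed_offset -> fraction stays 1.00.)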
+ self->mean_bit_counts = + malloc((farend->history_size + 1) * sizeof(int32_t)); malloc_fail |= (self->mean_bit_counts == NULL); self->bit_counts = malloc(farend->history_size * sizeof(int32_t)); @@ -172,6 +375,9 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator( self->binary_near_history = malloc((lookahead + 1) * sizeof(uint32_t)); malloc_fail |= (self->binary_near_history == NULL); + self->histogram = malloc((farend->history_size + 1) * sizeof(float)); + malloc_fail |= (self->histogram == NULL); + if (malloc_fail) { WebRtc_FreeBinaryDelayEstimator(self); self = NULL; @@ -188,23 +394,31 @@ void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self) { memset(self->bit_counts, 0, sizeof(int32_t) * self->farend->history_size); memset(self->binary_near_history, 0, sizeof(uint32_t) * self->near_history_size); - for (i = 0; i < self->farend->history_size; ++i) { + for (i = 0; i <= self->farend->history_size; ++i) { self->mean_bit_counts[i] = (20 << 9); // 20 in Q9. + self->histogram[i] = 0.f; } self->minimum_probability = (32 << 9); // 32 in Q9. self->last_delay_probability = (32 << 9); // 32 in Q9. // Default return value if we're unable to estimate. -1 is used for errors. self->last_delay = -2; + + self->last_candidate_delay = -2; + self->compare_delay = self->farend->history_size; + self->candidate_hits = 0; + self->last_delay_histogram = 0.f; } int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self, uint32_t binary_near_spectrum) { int i = 0; int candidate_delay = -1; + int valid_candidate = 0; int32_t value_best_candidate = 32 << 9; // 32 in Q9, (max |mean_bit_counts|). int32_t value_worst_candidate = 0; + int32_t valley_depth = 0; assert(self != NULL); if (self->near_history_size > 1) { @@ -249,6 +463,7 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self, value_worst_candidate = self->mean_bit_counts[i]; } } + valley_depth = value_worst_candidate - value_best_candidate; // The |value_best_candidate| is a good indicator on the probability of // |candidate_delay| being an accurate delay (a small |value_best_candidate| @@ -265,7 +480,7 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self, // Update |minimum_probability|. if ((self->minimum_probability > kProbabilityLowerLimit) && - (value_worst_candidate - value_best_candidate > kProbabilityMinSpread)) { + (valley_depth > kProbabilityMinSpread)) { // The "hard" threshold can't be lower than 17 (in Q9). // The valley in the curve also has to be distinct, i.e., the // difference between |value_worst_candidate| and |value_best_candidate| has @@ -281,16 +496,44 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self, // Update |last_delay_probability|. // We use a Markov type model, i.e., a slowly increasing level over time. self->last_delay_probability++; - if (value_worst_candidate > value_best_candidate + kProbabilityOffset) { - // Reliable delay value for usage. - if (value_best_candidate < self->minimum_probability) { - self->last_delay = candidate_delay; + // Validate |candidate_delay|. 
We have a reliable instantaneous delay + // estimate if + // 1) The valley is distinct enough (|valley_depth| > |kProbabilityOffset|) + // and + // 2) The depth of the valley is deep enough + // (|value_best_candidate| < |minimum_probability|) + // and deeper than the best estimate so far + // (|value_best_candidate| < |last_delay_probability|) + valid_candidate = ((valley_depth > kProbabilityOffset) && + ((value_best_candidate < self->minimum_probability) || + (value_best_candidate < self->last_delay_probability))); + + if (self->robust_validation_enabled) { + int is_histogram_valid = 0; + UpdateRobustValidationStatistics(self, candidate_delay, valley_depth, + value_best_candidate); + is_histogram_valid = HistogramBasedValidation(self, candidate_delay); + valid_candidate = RobustValidation(self, candidate_delay, valid_candidate, + is_histogram_valid); + + } + if (valid_candidate) { + if (candidate_delay != self->last_delay) { + self->last_delay_histogram = + (self->histogram[candidate_delay] > kLastHistogramMax ? + kLastHistogramMax : self->histogram[candidate_delay]); + // Adjust the histogram if we made a change to |last_delay|, though it was + // not the most likely one according to the histogram. + if (self->histogram[candidate_delay] < + self->histogram[self->compare_delay]) { + self->histogram[self->compare_delay] = self->histogram[candidate_delay]; + } } + self->last_delay = candidate_delay; if (value_best_candidate < self->last_delay_probability) { - self->last_delay = candidate_delay; - // Reset |last_delay_probability|. self->last_delay_probability = value_best_candidate; } + self->compare_delay = self->last_delay; } return self->last_delay; diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.h b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.h index bf2b08a217a7..b9a24bb5a228 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.h @@ -42,6 +42,15 @@ typedef struct { // Delay memory. int last_delay; + // Robust validation + int robust_validation_enabled; + int allowed_offset; + int last_candidate_delay; + int compare_delay; + int candidate_hits; + float* histogram; + float last_delay_histogram; + // Far-end binary spectrum history buffer etc. BinaryDelayEstimatorFarend* farend; } BinaryDelayEstimator; diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc index f4b47114e745..c30ab08625a9 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc @@ -26,6 +26,9 @@ enum { kLookahead = 10 }; // Length of binary spectrum sequence. 
enum { kSequenceLength = 400 }; +const int kEnable[] = { 0, 1 }; +const size_t kSizeEnable = sizeof(kEnable) / sizeof(*kEnable); + class DelayEstimatorTest : public ::testing::Test { protected: DelayEstimatorTest(); @@ -38,7 +41,8 @@ class DelayEstimatorTest : public ::testing::Test { void RunBinarySpectra(BinaryDelayEstimator* binary1, BinaryDelayEstimator* binary2, int near_offset, int lookahead_offset, int far_offset); - void RunBinarySpectraTest(int near_offset, int lookahead_offset); + void RunBinarySpectraTest(int near_offset, int lookahead_offset, + int ref_robust_validation, int robust_validation); void* handle_; DelayEstimator* self_; @@ -143,6 +147,8 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1, int near_offset, int lookahead_offset, int far_offset) { + int different_validations = binary1->robust_validation_enabled ^ + binary2->robust_validation_enabled; WebRtc_InitBinaryDelayEstimatorFarend(binary_farend_); WebRtc_InitBinaryDelayEstimator(binary1); WebRtc_InitBinaryDelayEstimator(binary2); @@ -167,8 +173,19 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1, if ((delay_1 != -2) && (delay_2 != -2)) { EXPECT_EQ(delay_1, delay_2 - lookahead_offset - near_offset); } + // For the case of identical signals |delay_1| and |delay_2| should match + // all the time, unless one of them has robust validation turned on. In + // that case the robust validation leaves the initial state faster. if ((near_offset == 0) && (lookahead_offset == 0)) { - EXPECT_EQ(delay_1, delay_2); + if (!different_validations) { + EXPECT_EQ(delay_1, delay_2); + } else { + if (binary1->robust_validation_enabled) { + EXPECT_GE(delay_1, delay_2); + } else { + EXPECT_GE(delay_2, delay_1); + } + } } } // Verify that we have left the initialized state. @@ -179,7 +196,9 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1, } void DelayEstimatorTest::RunBinarySpectraTest(int near_offset, - int lookahead_offset) { + int lookahead_offset, + int ref_robust_validation, + int robust_validation) { BinaryDelayEstimator* binary2 = WebRtc_CreateBinaryDelayEstimator(binary_farend_, kLookahead + lookahead_offset); @@ -187,6 +206,8 @@ void DelayEstimatorTest::RunBinarySpectraTest(int near_offset, // the delay is equivalent with a positive |offset| of the far-end sequence. // For non-causal systems the delay is equivalent with a negative |offset| of // the far-end sequence. + binary_->robust_validation_enabled = ref_robust_validation; + binary2->robust_validation_enabled = robust_validation; for (int offset = -kLookahead; offset < kMaxDelay - lookahead_offset - near_offset; offset++) { @@ -194,6 +215,7 @@ void DelayEstimatorTest::RunBinarySpectraTest(int near_offset, } WebRtc_FreeBinaryDelayEstimator(binary2); binary2 = NULL; + binary_->robust_validation_enabled = 0; // Reset reference. } TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) { @@ -246,6 +268,25 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) { EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_, spectrum_size_, 16)); + // WebRtc_set_allowed_offset() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) |allowed_offset| < 0. + EXPECT_EQ(-1, WebRtc_set_allowed_offset(NULL, 0)); + EXPECT_EQ(-1, WebRtc_set_allowed_offset(handle_, -1)); + + EXPECT_EQ(-1, WebRtc_get_allowed_offset(NULL)); + + // WebRtc_enable_robust_validation() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) Incorrect |enable| value (not 0 or 1). 
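// (For contrast with the failure cases listed above, a sketch of the
// accepted forms of the two new setters; both return 0 on success:
//   WebRtc_set_allowed_offset(handle_, 10);       // any offset >= 0
//   WebRtc_enable_robust_validation(handle_, 1);  // exactly 0 or 1
// )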
+ EXPECT_EQ(-1, WebRtc_enable_robust_validation(NULL, kEnable[0])); + EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, -1)); + EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, 2)); + + // WebRtc_is_robust_validation_enabled() should return -1 if we have NULL + // pointer as |handle|. + EXPECT_EQ(-1, WebRtc_is_robust_validation_enabled(NULL)); + // WebRtc_DelayEstimatorProcessFloat() should return -1 if we have: // 1) NULL pointer as |handle|. // 2) NULL pointer as near-end spectrum. @@ -283,6 +324,30 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) { WebRtc_FreeDelayEstimator(handle); } +TEST_F(DelayEstimatorTest, VerifyAllowedOffset) { + // Is set to zero by default. + EXPECT_EQ(0, WebRtc_get_allowed_offset(handle_)); + for (int i = 1; i >= 0; i--) { + EXPECT_EQ(0, WebRtc_set_allowed_offset(handle_, i)); + EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_)); + Init(); + // Unaffected over a reset. + EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_)); + } +} + +TEST_F(DelayEstimatorTest, VerifyEnableRobustValidation) { + // Disabled by default. + EXPECT_EQ(0, WebRtc_is_robust_validation_enabled(handle_)); + for (size_t i = 0; i < kSizeEnable; ++i) { + EXPECT_EQ(0, WebRtc_enable_robust_validation(handle_, kEnable[i])); + EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_)); + Init(); + // Unaffected over a reset. + EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_)); + } +} + TEST_F(DelayEstimatorTest, InitializedSpectrumAfterProcess) { // In this test we verify that the mean spectra are initialized after first // time we call WebRtc_AddFarSpectrum() and Process() respectively. @@ -370,7 +435,7 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimator) { BinaryDelayEstimator* binary_handle = binary_; // WebRtc_CreateBinaryDelayEstimator() should return -1 if we have a NULL - // pointer as |binary_handle| or invalid input values. Upon failure, the + // pointer as |binary_farend| or invalid input values. Upon failure, the // |binary_handle| should be NULL. // Make sure we have a non-NULL value at start, so we can detect NULL after // create failure. @@ -379,9 +444,6 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimator) { binary_handle = binary_; binary_handle = WebRtc_CreateBinaryDelayEstimator(binary_farend_, -1); EXPECT_TRUE(binary_handle == NULL); - binary_handle = binary_; - binary_handle = WebRtc_CreateBinaryDelayEstimator(0, 0); - EXPECT_TRUE(binary_handle == NULL); } TEST_F(DelayEstimatorTest, MeanEstimatorFix) { @@ -410,26 +472,57 @@ TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearSameSpectrum) { // the signal accordingly. We create two Binary Delay Estimators and feed them // with the same signals, so they should output the same results. // We verify both causal and non-causal delays. + // For these noise free signals, the robust validation should not have an + // impact, hence we turn robust validation on/off for both reference and + // delayed near end. - RunBinarySpectraTest(0, 0); + for (size_t i = 0; i < kSizeEnable; ++i) { + for (size_t j = 0; j < kSizeEnable; ++j) { + RunBinarySpectraTest(0, 0, kEnable[i], kEnable[j]); + } + } } TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentSpectrum) { // In this test we use the same setup as above, but we now feed the two Binary // Delay Estimators with different signals, so they should output different // results. 
+ // For these noise free signals, the robust validation should not have an + // impact, hence we turn robust validation on/off for both reference and + // delayed near end. const int kNearOffset = 1; - RunBinarySpectraTest(kNearOffset, 0); + for (size_t i = 0; i < kSizeEnable; ++i) { + for (size_t j = 0; j < kSizeEnable; ++j) { + RunBinarySpectraTest(kNearOffset, 0, kEnable[i], kEnable[j]); + } + } } TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentLookahead) { // In this test we use the same setup as above, feeding the two Binary // Delay Estimators with the same signals. The difference is that we create // them with different lookahead. + // For these noise free signals, the robust validation should not have an + // impact, hence we turn robust validation on/off for both reference and + // delayed near end. const int kLookaheadOffset = 1; - RunBinarySpectraTest(0, kLookaheadOffset); + for (size_t i = 0; i < kSizeEnable; ++i) { + for (size_t j = 0; j < kSizeEnable; ++j) { + RunBinarySpectraTest(0, kLookaheadOffset, kEnable[i], kEnable[j]); + } + } +} + +TEST_F(DelayEstimatorTest, AllowedOffsetNoImpactWhenRobustValidationDisabled) { + // The same setup as in ExactDelayEstimateMultipleNearSameSpectrum with the + // difference that |allowed_offset| is set for the reference binary delay + // estimator. + + binary_->allowed_offset = 10; + RunBinarySpectraTest(0, 0, 0, 0); + binary_->allowed_offset = 0; // Reset reference. } } // namespace diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c index c358f13836b9..1e6eff0835a3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c @@ -16,7 +16,7 @@ #include "webrtc/modules/audio_processing/utility/delay_estimator.h" #include "webrtc/modules/audio_processing/utility/delay_estimator_internal.h" -#include "webrtc/system_wrappers/interface/compile_assert.h" +#include "webrtc/system_wrappers/interface/compile_assert_c.h" // Only bit |kBandFirst| through bit |kBandLast| are processed and // |kBandFirst| - |kBandLast| must be < 32. 
@@ -312,6 +312,47 @@ int WebRtc_InitDelayEstimator(void* handle) { return 0; } +int WebRtc_set_allowed_offset(void* handle, int allowed_offset) { + DelayEstimator* self = (DelayEstimator*) handle; + + if ((self == NULL) || (allowed_offset < 0)) { + return -1; + } + self->binary_handle->allowed_offset = allowed_offset; + return 0; +} + +int WebRtc_get_allowed_offset(const void* handle) { + const DelayEstimator* self = (const DelayEstimator*) handle; + + if (self == NULL) { + return -1; + } + return self->binary_handle->allowed_offset; +} + +int WebRtc_enable_robust_validation(void* handle, int enable) { + DelayEstimator* self = (DelayEstimator*) handle; + + if (self == NULL) { + return -1; + } + if ((enable < 0) || (enable > 1)) { + return -1; + } + self->binary_handle->robust_validation_enabled = enable; + return 0; +} + +int WebRtc_is_robust_validation_enabled(const void* handle) { + const DelayEstimator* self = (const DelayEstimator*) handle; + + if (self == NULL) { + return -1; + } + return self->binary_handle->robust_validation_enabled; +} + int WebRtc_DelayEstimatorProcessFix(void* handle, uint16_t* near_spectrum, int spectrum_size, diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h index 51b9a0a1d5d6..5d11cd24db42 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h @@ -123,6 +123,35 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead); // int WebRtc_InitDelayEstimator(void* handle); +// Sets the |allowed_offset| used in the robust validation scheme. If the +// delay estimator is used in an echo control component, this parameter is +// related to the filter length. In principle |allowed_offset| should be set to +// the echo control filter length minus the expected echo duration, i.e., the +// delay offset the echo control can handle without quality regression. The +// default value, used if not set manually, is zero. Note that |allowed_offset| +// has to be non-negative. +// Inputs: +// - handle : Pointer to the delay estimation instance. +// - allowed_offset : The amount of delay offset, measured in partitions, +// the echo control filter can handle. +int WebRtc_set_allowed_offset(void* handle, int allowed_offset); + +// Returns the |allowed_offset| in number of partitions. +int WebRtc_get_allowed_offset(const void* handle); + +// TODO(bjornv): Implement this functionality. Currently, enabling it has no +// impact, hence this is an empty API. +// Enables/Disables a robust validation functionality in the delay estimation. +// This is by default set to disabled at create time. The state is preserved +// over a reset. +// Inputs: +// - handle : Pointer to the delay estimation instance. +// - enable : Enable (1) or disable (0) this feature. +int WebRtc_enable_robust_validation(void* handle, int enable); + +// Returns 1 if robust validation is enabled and 0 if disabled. +int WebRtc_is_robust_validation_enabled(const void* handle); + // Estimates and returns the delay between the far-end and near-end blocks. The // value will be offset by the lookahead (i.e. the lookahead should be // subtracted from the returned value). 
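// Putting the wrapper additions together: a minimal end-to-end sketch based
// on the declarations above and the existing farend API (an assumption here;
// sizes, Q-domains and spectra are placeholders):
//
//   void* farend = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize,
//                                                    kHistorySize);
//   void* handle = WebRtc_CreateDelayEstimator(farend, kLookahead);
//   WebRtc_InitDelayEstimatorFarend(farend);
//   WebRtc_InitDelayEstimator(handle);
//   WebRtc_enable_robust_validation(handle, 1);  // disabled by default
//   WebRtc_set_allowed_offset(handle, 10);       // e.g. filter headroom
//   for (each 10 ms block) {
//     WebRtc_AddFarSpectrumFix(farend, far_spectrum, kSpectrumSize, far_q);
//     int delay = WebRtc_DelayEstimatorProcessFix(handle, near_spectrum,
//                                                 kSpectrumSize, near_q);
//     // delay stays -2 until the estimator leaves its initial state.
//   }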
diff --git a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller.gypi b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller.gypi
index 5d37b91c2abe..e713282056fc 100644
--- a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller.gypi
+++ b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller.gypi
@@ -14,15 +14,6 @@
       'dependencies': [
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
-      'include_dirs': [
-        '../interface',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '../interface',
-          '<(webrtc_root)/modules/rtp_rtcp/interface',
-        ],
-      },
       'sources': [
         'bitrate_controller_impl.cc',
         'bitrate_controller_impl.h',
diff --git a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
index 20cc3aceec37..c2c8616559b8 100644
--- a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -11,6 +11,7 @@
 #include "webrtc/modules/bitrate_controller/bitrate_controller_impl.h"
 
+#include <algorithm>
 #include <map>
 
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
@@ -69,17 +70,95 @@ class RtcpBandwidthObserverImpl : public RtcpBandwidthObserver {
     owner_->OnReceivedRtcpReceiverReport(fraction_lost_aggregate, rtt,
                                          total_number_of_packets, now_ms);
   }
+ private:
   std::map<uint32_t, uint32_t> ssrc_to_last_received_extended_high_seq_num_;
   BitrateControllerImpl* owner_;
 };
 
-BitrateController* BitrateController::CreateBitrateController() {
-  return new BitrateControllerImpl();
+class LowRateStrategy {
+ public:
+  LowRateStrategy(
+      SendSideBandwidthEstimation* bandwidth_estimation,
+      BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
+      : bandwidth_estimation_(bandwidth_estimation),
+        bitrate_observers_(bitrate_observers) {}
+
+  virtual ~LowRateStrategy() {}
+
+  virtual void LowRateAllocation(uint32_t bitrate,
+                                 uint8_t fraction_loss,
+                                 uint32_t rtt,
+                                 uint32_t sum_min_bitrates) = 0;
+
+ protected:
+  SendSideBandwidthEstimation* bandwidth_estimation_;
+  BitrateControllerImpl::BitrateObserverConfList* bitrate_observers_;
+};
+
+class EnforceMinRateStrategy : public LowRateStrategy {
+ public:
+  EnforceMinRateStrategy(
+      SendSideBandwidthEstimation* bandwidth_estimation,
+      BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
+      : LowRateStrategy(bandwidth_estimation, bitrate_observers) {}
+
+  void LowRateAllocation(uint32_t bitrate,
+                         uint8_t fraction_loss,
+                         uint32_t rtt,
+                         uint32_t sum_min_bitrates) {
+    // Min bitrate to all observers.
+    BitrateControllerImpl::BitrateObserverConfList::iterator it;
+    for (it = bitrate_observers_->begin(); it != bitrate_observers_->end();
+         ++it) {
+      it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss,
+                                  rtt);
+    }
+    // Set sum of min to current send bitrate.
+    bandwidth_estimation_->SetSendBitrate(sum_min_bitrates);
+  }
+};
+
+class NoEnforceMinRateStrategy : public LowRateStrategy {
+ public:
+  NoEnforceMinRateStrategy(
+      SendSideBandwidthEstimation* bandwidth_estimation,
+      BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
+      : LowRateStrategy(bandwidth_estimation, bitrate_observers) {}
+
+  void LowRateAllocation(uint32_t bitrate,
+                         uint8_t fraction_loss,
+                         uint32_t rtt,
+                         uint32_t sum_min_bitrates) {
+    // Allocate up to |min_bitrate_| to one observer at a time, until
+    // |bitrate| is depleted.
+    uint32_t remainder = bitrate;
+    BitrateControllerImpl::BitrateObserverConfList::iterator it;
+    for (it = bitrate_observers_->begin(); it != bitrate_observers_->end();
+         ++it) {
+      uint32_t allocation = std::min(remainder, it->second->min_bitrate_);
+      it->first->OnNetworkChanged(allocation, fraction_loss, rtt);
+      remainder -= allocation;
+    }
+    // Set |bitrate| to current send bitrate.
+    bandwidth_estimation_->SetSendBitrate(bitrate);
+  }
+};
+
+BitrateController* BitrateController::CreateBitrateController(
+    bool enforce_min_bitrate) {
+  return new BitrateControllerImpl(enforce_min_bitrate);
 }
 
-BitrateControllerImpl::BitrateControllerImpl()
+BitrateControllerImpl::BitrateControllerImpl(bool enforce_min_bitrate)
     : critsect_(CriticalSectionWrapper::CreateCriticalSection()) {
+  if (enforce_min_bitrate) {
+    low_rate_strategy_.reset(new EnforceMinRateStrategy(
+        &bandwidth_estimation_, &bitrate_observers_));
+  } else {
+    low_rate_strategy_.reset(new NoEnforceMinRateStrategy(
+        &bandwidth_estimation_, &bitrate_observers_));
+  }
 }
 
 BitrateControllerImpl::~BitrateControllerImpl() {
@@ -159,6 +238,17 @@ void BitrateControllerImpl::RemoveBitrateObserver(BitrateObserver* observer) {
   }
 }
 
+void BitrateControllerImpl::EnforceMinBitrate(bool enforce_min_bitrate) {
+  CriticalSectionScoped cs(critsect_);
+  if (enforce_min_bitrate) {
+    low_rate_strategy_.reset(new EnforceMinRateStrategy(
+        &bandwidth_estimation_, &bitrate_observers_));
+  } else {
+    low_rate_strategy_.reset(new NoEnforceMinRateStrategy(
+        &bandwidth_estimation_, &bitrate_observers_));
+  }
+}
+
 void BitrateControllerImpl::OnReceivedEstimatedBitrate(const uint32_t bitrate) {
   uint32_t new_bitrate = 0;
   uint8_t fraction_lost = 0;
@@ -201,15 +291,8 @@ void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
     sum_min_bitrates += it->second->min_bitrate_;
   }
   if (bitrate <= sum_min_bitrates) {
-    // Min bitrate to all observers.
-    for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
-         ++it) {
-      it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss,
-                                  rtt);
-    }
-    // Set sum of min to current send bitrate.
-    bandwidth_estimation_.SetSendBitrate(sum_min_bitrates);
-    return;
+    return low_rate_strategy_->LowRateAllocation(bitrate, fraction_loss, rtt,
+                                                 sum_min_bitrates);
   }
   uint32_t bitrate_per_observer = (bitrate - sum_min_bitrates) /
       number_of_observers;
@@ -248,4 +331,5 @@ void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
 bool BitrateControllerImpl::AvailableBandwidth(uint32_t* bandwidth) const {
   return bandwidth_estimation_.AvailableBandwidth(bandwidth);
 }
+
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.h b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
index 4c891d9d4cc0..62ed6fda5ab1 100644
--- a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
@@ -19,33 +19,21 @@
 #include <list>
 #include <map>
+#include <utility>
 
 #include "webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
 
 namespace webrtc {
 
 class RtcpBandwidthObserverImpl;
+class LowRateStrategy;
 
 class BitrateControllerImpl : public BitrateController {
  public:
   friend class RtcpBandwidthObserverImpl;
 
-  explicit BitrateControllerImpl();
-  virtual ~BitrateControllerImpl();
-
-  virtual bool AvailableBandwidth(uint32_t* bandwidth) const OVERRIDE;
-
-  virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() OVERRIDE;
-
-  virtual void SetBitrateObserver(BitrateObserver* observer,
-                                  const uint32_t start_bitrate,
-                                  const uint32_t min_bitrate,
-                                  const uint32_t max_bitrate) OVERRIDE;
-
-  virtual void RemoveBitrateObserver(BitrateObserver* observer) OVERRIDE;
-
- protected:
   struct BitrateConfiguration {
     BitrateConfiguration(uint32_t start_bitrate,
                          uint32_t min_bitrate,
@@ -67,7 +55,27 @@ class BitrateControllerImpl : public BitrateController {
     BitrateObserver* observer_;
     uint32_t min_bitrate_;
   };
+  typedef std::pair<BitrateObserver*, BitrateConfiguration*>
+      BitrateObserverConfiguration;
+  typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
 
+  explicit BitrateControllerImpl(bool enforce_min_bitrate);
+  virtual ~BitrateControllerImpl();
+
+  virtual bool AvailableBandwidth(uint32_t* bandwidth) const OVERRIDE;
+
+  virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() OVERRIDE;
+
+  virtual void SetBitrateObserver(BitrateObserver* observer,
+                                  const uint32_t start_bitrate,
+                                  const uint32_t min_bitrate,
+                                  const uint32_t max_bitrate) OVERRIDE;
+
+  virtual void RemoveBitrateObserver(BitrateObserver* observer) OVERRIDE;
+
+  virtual void EnforceMinBitrate(bool enforce_min_bitrate) OVERRIDE;
+
+ private:
   // Called by BitrateObservers directly from the RTCP module.
   void OnReceivedEstimatedBitrate(const uint32_t bitrate);
@@ -76,11 +84,7 @@ class BitrateControllerImpl : public BitrateController {
                                     const int number_of_packets,
                                     const uint32_t now_ms);
 
- private:
   typedef std::multimap<uint32_t, ObserverConfiguration*> ObserverSortingMap;
-  typedef std::pair<BitrateObserver*, BitrateConfiguration*>
-      BitrateObserverConfiguration;
-  typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
 
   BitrateObserverConfList::iterator
       FindObserverConfigurationPair(const BitrateObserver* observer);
@@ -91,6 +95,7 @@ class BitrateControllerImpl : public BitrateController {
   CriticalSectionWrapper* critsect_;
   SendSideBandwidthEstimation bandwidth_estimation_;
   BitrateObserverConfList bitrate_observers_;
+  scoped_ptr<LowRateStrategy> low_rate_strategy_;
 };
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_BITRATE_CONTROLLER_BITRATE_CONTROLLER_IMPL_H_
diff --git a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
index 7abe71b51058..30f85a81cb91 100644
--- a/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -57,12 +57,12 @@ class TestBitrateObserver: public BitrateObserver {
 
 class BitrateControllerTest : public ::testing::Test {
  protected:
-  BitrateControllerTest() {
-  }
+  BitrateControllerTest() : enforce_min_bitrate_(true) {}
   ~BitrateControllerTest() {}
 
   virtual void SetUp() {
-    controller_ = BitrateController::CreateBitrateController();
+    controller_ =
+        BitrateController::CreateBitrateController(enforce_min_bitrate_);
     bandwidth_observer_ = controller_->CreateRtcpBandwidthObserver();
   }
 
@@ -70,6 +70,7 @@ class BitrateControllerTest : public ::testing::Test {
     delete bandwidth_observer_;
     delete controller_;
   }
+  bool enforce_min_bitrate_;
   BitrateController* controller_;
   RtcpBandwidthObserver* bandwidth_observer_;
 };
@@ -414,3 +415,86 @@ TEST_F(BitrateControllerTest, TwoBitrateObserversOneRtcpObserver) {
   controller_->RemoveBitrateObserver(&bitrate_observer_1);
   controller_->RemoveBitrateObserver(&bitrate_observer_2);
 }
+
+class BitrateControllerTestNoEnforceMin : public BitrateControllerTest {
+ protected:
+  BitrateControllerTestNoEnforceMin() : BitrateControllerTest() {
+    enforce_min_bitrate_ = false;
+  }
+};
+
+// The following three tests verify that the |enforce_min_bitrate| setting
+// works as intended.
+TEST_F(BitrateControllerTestNoEnforceMin, OneBitrateObserver) {
+  TestBitrateObserver bitrate_observer_1;
+  controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 400000);
+
+  // High REMB.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(150000);
+  EXPECT_EQ(150000u, bitrate_observer_1.last_bitrate_);
+
+  // Low REMB.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+  EXPECT_EQ(1000u, bitrate_observer_1.last_bitrate_);
+
+  controller_->RemoveBitrateObserver(&bitrate_observer_1);
+}
+
+TEST_F(BitrateControllerTestNoEnforceMin, ThreeBitrateObservers) {
+  TestBitrateObserver bitrate_observer_1;
+  TestBitrateObserver bitrate_observer_2;
+  TestBitrateObserver bitrate_observer_3;
+  // Set up the observers with min bitrates at 100000, 200000, and 300000.
+  // Note: The start bitrate of bitrate_observer_1 (700000) is used as the
+  // overall start bitrate.
+  controller_->SetBitrateObserver(&bitrate_observer_1, 700000, 100000, 400000);
+  controller_->SetBitrateObserver(&bitrate_observer_2, 200000, 200000, 400000);
+  controller_->SetBitrateObserver(&bitrate_observer_3, 200000, 300000, 400000);
+
+  // High REMB.
+  // Make sure the controllers get a fair share of the surplus (i.e., what is
+  // left after each controller gets its min rate).
+  bandwidth_observer_->OnReceivedEstimatedBitrate(690000);
+  // Verify that each observer gets its min rate (sum of min rates is 600000),
+  // and that the remaining 90000 is divided equally among the three.
+  EXPECT_EQ(130000u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(230000u, bitrate_observer_2.last_bitrate_);
+  EXPECT_EQ(330000u, bitrate_observer_3.last_bitrate_);
+
+  // High REMB, but below the sum of min bitrates.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(500000);
+  // Verify that the first and second observers get their min bitrates, and the
+  // third gets the remainder.
+  EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);  // Min bitrate.
+  EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);  // Min bitrate.
+  EXPECT_EQ(200000u, bitrate_observer_3.last_bitrate_);  // Remainder.
+
+  // Low REMB.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+  // Verify that the first observer gets all the rate, and the rest get zero.
+  EXPECT_EQ(1000u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
+  EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
+
+  controller_->RemoveBitrateObserver(&bitrate_observer_1);
+  controller_->RemoveBitrateObserver(&bitrate_observer_2);
+  controller_->RemoveBitrateObserver(&bitrate_observer_3);
+}
+
+TEST_F(BitrateControllerTest, ThreeBitrateObserversLowRembEnforceMin) {
+  TestBitrateObserver bitrate_observer_1;
+  TestBitrateObserver bitrate_observer_2;
+  TestBitrateObserver bitrate_observer_3;
+  controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 300000);
+  controller_->SetBitrateObserver(&bitrate_observer_2, 200000, 200000, 300000);
+  controller_->SetBitrateObserver(&bitrate_observer_3, 200000, 300000, 300000);
+
+  // Low REMB. Verify that all observers still get their respective min
+  // bitrate.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+  EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);  // Min cap.
+  EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);  // Min cap.
+  EXPECT_EQ(300000u, bitrate_observer_3.last_bitrate_);  // Min cap.
+
+  controller_->RemoveBitrateObserver(&bitrate_observer_1);
+  controller_->RemoveBitrateObserver(&bitrate_observer_2);
+  controller_->RemoveBitrateObserver(&bitrate_observer_3);
+}
diff --git a/media/webrtc/trunk/webrtc/modules/bitrate_controller/include/bitrate_controller.h b/media/webrtc/trunk/webrtc/modules/bitrate_controller/include/bitrate_controller.h
index d74be1695dd8..0f7436765816 100644
--- a/media/webrtc/trunk/webrtc/modules/bitrate_controller/include/bitrate_controller.h
+++ b/media/webrtc/trunk/webrtc/modules/bitrate_controller/include/bitrate_controller.h
@@ -43,7 +43,12 @@ class BitrateController {
  *  BitrateObservers.
  */
  public:
-  static BitrateController* CreateBitrateController();
+  // The argument |enforce_min_bitrate| controls the behavior when the
+  // available bitrate is lower than the minimum bitrate, or the sum of
+  // minimum bitrates. When true, the bitrate will never be set lower than
+  // the minimum bitrate(s). When false, the bitrate observers will be
+  // allocated rates up to their respective minimum bitrate, satisfying one
+  // observer after the other.
+  static BitrateController* CreateBitrateController(bool enforce_min_bitrate);
   virtual ~BitrateController() {}
 
   virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() = 0;
@@ -65,6 +70,9 @@ class BitrateController {
                                   const uint32_t max_bitrate) = 0;
 
   virtual void RemoveBitrateObserver(BitrateObserver* observer) = 0;
+
+  // Changes the mode that was set in the constructor.
+  virtual void EnforceMinBitrate(bool enforce_min_bitrate) = 0;
 };
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
new file mode 100644
index 000000000000..05e2a9b2b525
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
@@ -0,0 +1,170 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/desktop_and_cursor_composer.h"
+
+#include <string.h>
+
+#include "webrtc/modules/desktop_capture/desktop_capturer.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+
+namespace webrtc {
+
+namespace {
+
+// Helper function that blends one image into another. Source image must be
+// pre-multiplied with the alpha channel. Destination is assumed to be opaque.
+void AlphaBlend(uint8_t* dest, int dest_stride,
+                const uint8_t* src, int src_stride,
+                const DesktopSize& size) {
+  for (int y = 0; y < size.height(); ++y) {
+    for (int x = 0; x < size.width(); ++x) {
+      uint32_t base_alpha = 255 - src[x * DesktopFrame::kBytesPerPixel + 3];
+      if (base_alpha == 255) {
+        continue;
+      } else if (base_alpha == 0) {
+        memcpy(dest + x * DesktopFrame::kBytesPerPixel,
+               src + x * DesktopFrame::kBytesPerPixel,
+               DesktopFrame::kBytesPerPixel);
+      } else {
+        dest[x * DesktopFrame::kBytesPerPixel] =
+            dest[x * DesktopFrame::kBytesPerPixel] * base_alpha / 255 +
+            src[x * DesktopFrame::kBytesPerPixel];
+        dest[x * DesktopFrame::kBytesPerPixel + 1] =
+            dest[x * DesktopFrame::kBytesPerPixel + 1] * base_alpha / 255 +
+            src[x * DesktopFrame::kBytesPerPixel + 1];
+        dest[x * DesktopFrame::kBytesPerPixel + 2] =
+            dest[x * DesktopFrame::kBytesPerPixel + 2] * base_alpha / 255 +
+            src[x * DesktopFrame::kBytesPerPixel + 2];
+      }
+    }
+    src += src_stride;
+    dest += dest_stride;
+  }
+}
+
+// DesktopFrame wrapper that draws mouse on a frame and restores original
+// content before releasing the underlying frame.
+class DesktopFrameWithCursor : public DesktopFrame {
+ public:
+  // Takes ownership of |frame|.
+  DesktopFrameWithCursor(DesktopFrame* frame,
+                         const MouseCursor& cursor,
+                         const DesktopVector& position);
+  virtual ~DesktopFrameWithCursor();
+
+ private:
+  scoped_ptr<DesktopFrame> original_frame_;
+
+  DesktopVector restore_position_;
+  scoped_ptr<DesktopFrame> restore_frame_;
+
+  DISALLOW_COPY_AND_ASSIGN(DesktopFrameWithCursor);
+};
+
+DesktopFrameWithCursor::DesktopFrameWithCursor(DesktopFrame* frame,
+                                               const MouseCursor& cursor,
+                                               const DesktopVector& position)
+    : DesktopFrame(frame->size(), frame->stride(),
+                   frame->data(), frame->shared_memory()),
+      original_frame_(frame) {
+  set_dpi(frame->dpi());
+  set_capture_time_ms(frame->capture_time_ms());
+  mutable_updated_region()->Swap(frame->mutable_updated_region());
+
+  DesktopVector image_pos = position.subtract(cursor.hotspot());
+  DesktopRect target_rect = DesktopRect::MakeSize(cursor.image()->size());
+  target_rect.Translate(image_pos);
+  DesktopVector target_origin = target_rect.top_left();
+  target_rect.IntersectWith(DesktopRect::MakeSize(size()));
+
+  if (target_rect.is_empty())
+    return;
+
+  // Copy original screen content under cursor to |restore_frame_|.
+  restore_position_ = target_rect.top_left();
+  restore_frame_.reset(new BasicDesktopFrame(target_rect.size()));
+  restore_frame_->CopyPixelsFrom(*this, target_rect.top_left(),
+                                 DesktopRect::MakeSize(restore_frame_->size()));
+
+  // Blit the cursor.
+  uint8_t* target_rect_data = reinterpret_cast<uint8_t*>(data()) +
+                              target_rect.top() * stride() +
+                              target_rect.left() * DesktopFrame::kBytesPerPixel;
+  DesktopVector origin_shift = target_rect.top_left().subtract(target_origin);
+  AlphaBlend(target_rect_data, stride(),
+             cursor.image()->data() +
+                 origin_shift.y() * cursor.image()->stride() +
+                 origin_shift.x() * DesktopFrame::kBytesPerPixel,
+             cursor.image()->stride(),
+             target_rect.size());
+}
+
+DesktopFrameWithCursor::~DesktopFrameWithCursor() {
+  // Restore original content of the frame.
+  if (restore_frame_.get()) {
+    DesktopRect target_rect = DesktopRect::MakeSize(restore_frame_->size());
+    target_rect.Translate(restore_position_);
+    CopyPixelsFrom(restore_frame_->data(), restore_frame_->stride(),
+                   target_rect);
+  }
+}
+
+}  // namespace
+
+DesktopAndCursorComposer::DesktopAndCursorComposer(
+    DesktopCapturer* desktop_capturer,
+    MouseCursorMonitor* mouse_monitor)
+    : desktop_capturer_(desktop_capturer),
+      mouse_monitor_(mouse_monitor) {
+}
+
+DesktopAndCursorComposer::~DesktopAndCursorComposer() {}
+
+void DesktopAndCursorComposer::Start(DesktopCapturer::Callback* callback) {
+  callback_ = callback;
+  if (mouse_monitor_.get())
+    mouse_monitor_->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
+  desktop_capturer_->Start(this);
+}
+
+void DesktopAndCursorComposer::Capture(const DesktopRegion& region) {
+  if (mouse_monitor_.get())
+    mouse_monitor_->Capture();
+  desktop_capturer_->Capture(region);
+}
+
+SharedMemory* DesktopAndCursorComposer::CreateSharedMemory(size_t size) {
+  return callback_->CreateSharedMemory(size);
+}
+
+void DesktopAndCursorComposer::OnCaptureCompleted(DesktopFrame* frame) {
+  if (frame && cursor_.get() && cursor_state_ == MouseCursorMonitor::INSIDE) {
+    DesktopFrameWithCursor* frame_with_cursor =
+        new DesktopFrameWithCursor(frame, *cursor_, cursor_position_);
+    frame = frame_with_cursor;
+  }
+
+  callback_->OnCaptureCompleted(frame);
+}
+
+void DesktopAndCursorComposer::OnMouseCursor(MouseCursor* cursor) {
+  cursor_.reset(cursor);
+}
+
+void DesktopAndCursorComposer::OnMouseCursorPosition(
+    MouseCursorMonitor::CursorState state,
+    const DesktopVector& position) {
+  cursor_state_ = state;
+  cursor_position_ = position;
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
new file mode 100644
index 000000000000..4f7c85bde3e5
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_
+
+#include "webrtc/modules/desktop_capture/desktop_capturer.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+// A wrapper for DesktopCapturer that also captures mouse using specified
+// MouseCursorMonitor and renders it on the generated streams.
+class DesktopAndCursorComposer : public DesktopCapturer,
+                                 public DesktopCapturer::Callback,
+                                 public MouseCursorMonitor::Callback {
+ public:
+  // Creates a new blender that captures mouse cursor using |mouse_monitor| and
+  // renders it into the frames generated by |desktop_capturer|. If
+  // |mouse_monitor| is NULL the frames are passed unmodified. Takes ownership
+  // of both arguments.
+  DesktopAndCursorComposer(DesktopCapturer* desktop_capturer,
+                           MouseCursorMonitor* mouse_monitor);
+  virtual ~DesktopAndCursorComposer();
+
+  // DesktopCapturer interface.
+  virtual void Start(DesktopCapturer::Callback* callback) OVERRIDE;
+  virtual void Capture(const DesktopRegion& region) OVERRIDE;
+
+ private:
+  // DesktopCapturer::Callback interface.
+  virtual SharedMemory* CreateSharedMemory(size_t size) OVERRIDE;
+  virtual void OnCaptureCompleted(DesktopFrame* frame) OVERRIDE;
+
+  // MouseCursorMonitor::Callback interface.
+  virtual void OnMouseCursor(MouseCursor* cursor) OVERRIDE;
+  virtual void OnMouseCursorPosition(MouseCursorMonitor::CursorState state,
+                                     const DesktopVector& position) OVERRIDE;
+
+  scoped_ptr<DesktopCapturer> desktop_capturer_;
+  scoped_ptr<MouseCursorMonitor> mouse_monitor_;
+
+  DesktopCapturer::Callback* callback_;
+
+  scoped_ptr<MouseCursor> cursor_;
+  MouseCursorMonitor::CursorState cursor_state_;
+  DesktopVector cursor_position_;
+
+  DISALLOW_COPY_AND_ASSIGN(DesktopAndCursorComposer);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
new file mode 100644
index 000000000000..b482a29605e6
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
@@ -0,0 +1,258 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/desktop_and_cursor_composer.h"
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
+#include "webrtc/modules/desktop_capture/window_capturer.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+namespace {
+
+const int kScreenWidth = 100;
+const int kScreenHeight = 100;
+const int kCursorWidth = 10;
+const int kCursorHeight = 10;
+
+const int kTestCursorSize = 3;
+const uint32_t kTestCursorData[kTestCursorSize][kTestCursorSize] = {
+  { 0xffffffff, 0x99990000, 0xaa222222, },
+  { 0x88008800, 0xaa0000aa, 0xaa333333, },
+  { 0x00000000, 0xaa0000aa, 0xaa333333, },
+};
+
+uint32_t GetFakeFramePixelValue(const DesktopVector& p) {
+  uint32_t r = 100 + p.x();
+  uint32_t g = 100 + p.y();
+  uint32_t b = 100 + p.x() + p.y();
+  return b + (g << 8) + (r << 16) + 0xff000000;
+}
+
+uint32_t GetFramePixel(const DesktopFrame& frame, const DesktopVector& pos) {
+  return *reinterpret_cast<uint32_t*>(frame.data() + pos.y() * frame.stride() +
+                                      pos.x() * DesktopFrame::kBytesPerPixel);
+}
+
+// Blends two pixel values taking into account alpha.
+uint32_t BlendPixels(uint32_t dest, uint32_t src) {
+  uint8_t alpha = 255 - ((src & 0xff000000) >> 24);
+  uint32_t r =
+      ((dest & 0x00ff0000) >> 16) * alpha / 255 + ((src & 0x00ff0000) >> 16);
+  uint32_t g =
+      ((dest & 0x0000ff00) >> 8) * alpha / 255 + ((src & 0x0000ff00) >> 8);
+  uint32_t b = (dest & 0x000000ff) * alpha / 255 + (src & 0x000000ff);
+  return b + (g << 8) + (r << 16) + 0xff000000;
+}
+
+DesktopFrame* CreateTestFrame() {
+  DesktopFrame* frame =
+      new BasicDesktopFrame(DesktopSize(kScreenWidth, kScreenHeight));
+  uint32_t* data = reinterpret_cast<uint32_t*>(frame->data());
+  for (int y = 0; y < kScreenHeight; ++y) {
+    for (int x = 0; x < kScreenWidth; ++x) {
+      *(data++) = GetFakeFramePixelValue(DesktopVector(x, y));
+    }
+  }
+  return frame;
+}
+
+class FakeScreenCapturer : public DesktopCapturer {
+ public:
+  FakeScreenCapturer() {}
+
+  virtual void Start(Callback* callback) OVERRIDE {
+    callback_ = callback;
+  }
+
+  virtual void Capture(const DesktopRegion& region) OVERRIDE {
+    callback_->OnCaptureCompleted(next_frame_.release());
+  }
+
+  void SetNextFrame(DesktopFrame* next_frame) {
+    next_frame_.reset(next_frame);
+  }
+
+ private:
+  Callback* callback_;
+
+  scoped_ptr<DesktopFrame> next_frame_;
+};
+
+class FakeMouseMonitor : public MouseCursorMonitor {
+ public:
+  FakeMouseMonitor() : changed_(true) {}
+
+  void SetState(CursorState state, const DesktopVector& pos) {
+    state_ = state;
+    position_ = pos;
+  }
+
+  void SetHotspot(const DesktopVector& hotspot) {
+    if (!hotspot_.equals(hotspot))
+      changed_ = true;
+    hotspot_ = hotspot;
+  }
+
+  virtual void Init(Callback* callback, Mode mode) OVERRIDE {
+    callback_ = callback;
+  }
+
+  virtual void Capture() OVERRIDE {
+    if (changed_) {
+      scoped_ptr<DesktopFrame> image(
+          new BasicDesktopFrame(DesktopSize(kCursorWidth, kCursorHeight)));
+      uint32_t* data = reinterpret_cast<uint32_t*>(image->data());
+      memset(data, 0, image->stride() * kCursorHeight);
+
+      // Set a kTestCursorSize x kTestCursorSize block of pixels at the
+      // hotspot and leave all others blank.
+      for (int y = 0; y < kTestCursorSize; ++y) {
+        for (int x = 0; x < kTestCursorSize; ++x) {
+          data[(hotspot_.y() + y) * kCursorWidth + (hotspot_.x() + x)] =
+              kTestCursorData[y][x];
+        }
+      }
+
+      callback_->OnMouseCursor(new MouseCursor(image.release(), hotspot_));
+    }
+
+    callback_->OnMouseCursorPosition(state_, position_);
+  }
+
+ private:
+  Callback* callback_;
+  CursorState state_;
+  DesktopVector position_;
+  DesktopVector hotspot_;
+  bool changed_;
+};
+
+void VerifyFrame(const DesktopFrame& frame,
+                 MouseCursorMonitor::CursorState state,
+                 const DesktopVector& pos) {
+  // Verify that all other pixels are set to their original values.
+  DesktopRect image_rect =
+      DesktopRect::MakeWH(kTestCursorSize, kTestCursorSize);
+  image_rect.Translate(pos);
+
+  for (int y = 0; y < kScreenHeight; ++y) {
+    for (int x = 0; x < kScreenWidth; ++x) {
+      DesktopVector p(x, y);
+      if (state == MouseCursorMonitor::INSIDE && image_rect.Contains(p)) {
+        EXPECT_EQ(BlendPixels(GetFakeFramePixelValue(p),
+                              kTestCursorData[y - pos.y()][x - pos.x()]),
+                  GetFramePixel(frame, p));
+      } else {
+        EXPECT_EQ(GetFakeFramePixelValue(p), GetFramePixel(frame, p));
+      }
+    }
+  }
+}
+
+class DesktopAndCursorComposerTest : public testing::Test,
+                                     public DesktopCapturer::Callback {
+ public:
+  DesktopAndCursorComposerTest()
+      : fake_screen_(new FakeScreenCapturer()),
+        fake_cursor_(new FakeMouseMonitor()),
+        blender_(fake_screen_, fake_cursor_) {
+  }
+
+  // DesktopCapturer::Callback interface
+  virtual SharedMemory* CreateSharedMemory(size_t size) OVERRIDE {
+    return NULL;
+  }
+
+  virtual void OnCaptureCompleted(DesktopFrame* frame) OVERRIDE {
+    frame_.reset(frame);
+  }
+
+ protected:
+  // Owned by |blender_|.
+  FakeScreenCapturer* fake_screen_;
+  FakeMouseMonitor* fake_cursor_;
+
+  DesktopAndCursorComposer blender_;
+  scoped_ptr<DesktopFrame> frame_;
+};
+
+// Verify DesktopAndCursorComposer can handle the case when the screen capturer
+// fails.
+TEST_F(DesktopAndCursorComposerTest, Error) {
+  blender_.Start(this);
+
+  fake_cursor_->SetHotspot(DesktopVector());
+  fake_cursor_->SetState(MouseCursorMonitor::INSIDE, DesktopVector());
+  fake_screen_->SetNextFrame(NULL);
+
+  blender_.Capture(DesktopRegion());
+
+  EXPECT_EQ(frame_, static_cast<DesktopFrame*>(NULL));
+}
+
+TEST_F(DesktopAndCursorComposerTest, Blend) {
+  struct {
+    int x, y;
+    int hotspot_x, hotspot_y;
+    bool inside;
+  } tests[] = {
+    {0, 0, 0, 0, true},
+    {50, 50, 0, 0, true},
+    {100, 50, 0, 0, true},
+    {50, 100, 0, 0, true},
+    {100, 100, 0, 0, true},
+    {0, 0, 2, 5, true},
+    {1, 1, 2, 5, true},
+    {50, 50, 2, 5, true},
+    {100, 100, 2, 5, true},
+    {0, 0, 5, 2, true},
+    {50, 50, 5, 2, true},
+    {100, 100, 5, 2, true},
+    {0, 0, 0, 0, false},
+  };
+
+  blender_.Start(this);
+
+  for (size_t i = 0; i < (sizeof(tests) / sizeof(tests[0])); ++i) {
+    SCOPED_TRACE(i);
+
+    DesktopVector hotspot(tests[i].hotspot_x, tests[i].hotspot_y);
+    fake_cursor_->SetHotspot(hotspot);
+
+    MouseCursorMonitor::CursorState state = tests[i].inside
+        ? MouseCursorMonitor::INSIDE
+        : MouseCursorMonitor::OUTSIDE;
+    DesktopVector pos(tests[i].x, tests[i].y);
+    fake_cursor_->SetState(state, pos);
+
+    scoped_ptr<SharedDesktopFrame> frame(
+        SharedDesktopFrame::Wrap(CreateTestFrame()));
+    fake_screen_->SetNextFrame(frame->Share());
+
+    blender_.Capture(DesktopRegion());
+
+    VerifyFrame(*frame_, state, pos);
+
+    // Verify that the cursor is erased before the frame buffer is returned to
+    // the screen capturer.
+    frame_.reset();
+    VerifyFrame(*frame, MouseCursorMonitor::OUTSIDE, DesktopVector());
+  }
+}
+
+}  // namespace
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture.gypi b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture.gypi
index 0bc3839ef441..9463257ce995 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture.gypi
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture.gypi
@@ -15,6 +15,9 @@
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
       'sources': [
+        "desktop_and_cursor_composer.cc",
+        "desktop_and_cursor_composer.h",
+        "desktop_capture_types.h",
         "desktop_capturer.h",
         "desktop_frame.cc",
         "desktop_frame.h",
@@ -22,6 +25,9 @@
         "desktop_frame_win.h",
         "desktop_geometry.cc",
         "desktop_geometry.h",
+        "desktop_capture_options.h",
+        "desktop_capture_options.cc",
+        "desktop_capturer.h",
         "desktop_region.cc",
         "desktop_region.h",
         "differ.cc",
@@ -30,11 +36,22 @@
         "differ_block.h",
         "mac/desktop_configuration.h",
         "mac/desktop_configuration.mm",
+        "mac/desktop_configuration_monitor.h",
+        "mac/desktop_configuration_monitor.cc",
+        "mac/osx_version.h",
+        "mac/osx_version.cc",
         "mac/scoped_pixel_buffer_object.cc",
         "mac/scoped_pixel_buffer_object.h",
+        "mouse_cursor.cc",
+        "mouse_cursor.h",
+        "mouse_cursor_monitor.h",
+        "mouse_cursor_monitor_mac.mm",
+        "mouse_cursor_monitor_win.cc",
+        "mouse_cursor_monitor_x11.cc",
         "mouse_cursor_shape.h",
         "screen_capture_frame_queue.cc",
         "screen_capture_frame_queue.h",
+        "screen_capturer.cc",
         "screen_capturer.h",
         "screen_capturer_helper.cc",
         "screen_capturer_helper.h",
@@ -52,10 +69,13 @@
         "win/scoped_gdi_object.h",
         "win/scoped_thread_desktop.cc",
         "win/scoped_thread_desktop.h",
+        "window_capturer.cc",
         "window_capturer.h",
         "window_capturer_mac.cc",
         "window_capturer_win.cc",
         "window_capturer_x11.cc",
+        "x11/shared_x_display.h",
+        "x11/shared_x_display.cc",
         "x11/x_error_trap.cc",
         "x11/x_error_trap.h",
         "x11/x_server_pixel_buffer.cc",
@@ -81,6 +101,7 @@
         }],
         ['OS!="win" and OS!="mac" and use_x11==0', {
           'sources': [
+            "mouse_cursor_monitor_null.cc",
            "screen_capturer_null.cc",
            "window_capturer_null.cc",
           ],
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_options.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_options.cc
new file mode 100644
index 000000000000..26044e127f2c
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_options.cc
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+
+namespace webrtc {
+
+DesktopCaptureOptions::DesktopCaptureOptions()
+    : use_update_notifications_(true),
+      disable_effects_(true) {
+#if defined(USE_X11)
+  // XDamage is often broken, so don't use it by default.
+  use_update_notifications_ = false;
+#endif
+}
+
+DesktopCaptureOptions::~DesktopCaptureOptions() {}
+
+// static
+DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() {
+  DesktopCaptureOptions result;
+#if defined(USE_X11)
+  result.set_x_display(SharedXDisplay::CreateDefault());
+#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+  result.set_configuration_monitor(new DesktopConfigurationMonitor());
+#endif
+  return result;
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_options.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_options.h
new file mode 100644
index 000000000000..2a188a03a36b
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_options.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
+
+#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
+
+#if defined(USE_X11)
+#include "webrtc/modules/desktop_capture/x11/shared_x_display.h"
+#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#endif
+
+namespace webrtc {
+
+// An object that stores initialization parameters for screen and window
+// capturers.
+class DesktopCaptureOptions {
+ public:
+  // Creates an empty Options instance (e.g. without X display).
+  DesktopCaptureOptions();
+  ~DesktopCaptureOptions();
+
+  // Returns an instance of DesktopCaptureOptions with default parameters. On
+  // Linux it also initializes the X window connection. x_display() will be
+  // set to null if the X11 connection failed (e.g. DISPLAY isn't set).
+  static DesktopCaptureOptions CreateDefault();
+
+#if defined(USE_X11)
+  SharedXDisplay* x_display() const { return x_display_; }
+  void set_x_display(scoped_refptr<SharedXDisplay> x_display) {
+    x_display_ = x_display;
+  }
+#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+  DesktopConfigurationMonitor* configuration_monitor() const {
+    return configuration_monitor_;
+  }
+  void set_configuration_monitor(scoped_refptr<DesktopConfigurationMonitor> m) {
+    configuration_monitor_ = m;
+  }
+#endif
+
+  // Flag indicating that the capturer should use screen change notifications.
+  // Enables/disables use of XDAMAGE in the X11 capturer.
+  bool use_update_notifications() const { return use_update_notifications_; }
+  void set_use_update_notifications(bool use_update_notifications) {
+    use_update_notifications_ = use_update_notifications;
+  }
+
+  // Flag indicating if desktop effects (e.g. Aero) should be disabled when the
+  // capturer is active. Currently used only on Windows.
+  bool disable_effects() const { return disable_effects_; }
+  void set_disable_effects(bool disable_effects) {
+    disable_effects_ = disable_effects;
+  }
+
+ private:
+#if defined(USE_X11)
+  scoped_refptr<SharedXDisplay> x_display_;
+#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+  scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
+#endif
+  bool use_update_notifications_;
+  bool disable_effects_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_types.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_types.h
new file mode 100644
index 000000000000..3e4179655316
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_types.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
+
+#include <stdint.h>
+
+#include "webrtc/modules/desktop_capture/desktop_geometry.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Type used to identify windows on the desktop. Values are platform-specific:
+//   - On Windows: HWND cast to intptr_t.
+//   - On Linux (with X11): X11 Window (unsigned long) type cast to intptr_t.
+//   - On OSX: integer window number.
+typedef intptr_t WindowId;
+
+const WindowId kNullWindowId = 0;
+
+// Type used to identify screens on the desktop. Values are platform-specific:
+//   - On Windows: integer display device index.
+//   - On OSX: CGDirectDisplayID cast to intptr_t.
+//   - On Linux (with X11): TBD.
+typedef intptr_t ScreenId;
+
+// The screen id that corresponds to all screens combined together.
+const ScreenId kFullDesktopScreenId = -1;
+
+const ScreenId kInvalidScreenId = -2;
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
+
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.cc
index 90e1fbd3e3f6..f26dc9371b61 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.cc
@@ -10,6 +10,9 @@
 
 #include "webrtc/modules/desktop_capture/desktop_frame.h"
 
+#include <assert.h>
+#include <string.h>
+
 namespace webrtc {
 
 DesktopFrame::DesktopFrame(DesktopSize size,
@@ -25,6 +28,30 @@ DesktopFrame::DesktopFrame(DesktopSize size,
 
 DesktopFrame::~DesktopFrame() {}
 
+void DesktopFrame::CopyPixelsFrom(uint8_t* src_buffer, int src_stride,
+                                  const DesktopRect& dest_rect) {
+  assert(DesktopRect::MakeSize(size()).ContainsRect(dest_rect));
+
+  uint8_t* dest = data() + stride() * dest_rect.top() +
+                  DesktopFrame::kBytesPerPixel * dest_rect.left();
+  for (int y = 0; y < dest_rect.height(); ++y) {
+    memcpy(dest, src_buffer, DesktopFrame::kBytesPerPixel * dest_rect.width());
+    src_buffer += src_stride;
+    dest += stride();
+  }
+}
+
+void DesktopFrame::CopyPixelsFrom(const DesktopFrame& src_frame,
+                                  const DesktopVector& src_pos,
+                                  const DesktopRect& dest_rect) {
+  assert(DesktopRect::MakeSize(src_frame.size()).ContainsRect(
+      DesktopRect::MakeOriginSize(src_pos, dest_rect.size())));
+
+  CopyPixelsFrom(src_frame.data() + src_frame.stride() * src_pos.y() +
+                     DesktopFrame::kBytesPerPixel * src_pos.x(),
+                 src_frame.stride(), dest_rect);
+}
+
 BasicDesktopFrame::BasicDesktopFrame(DesktopSize size)
     : DesktopFrame(size, kBytesPerPixel * size.width(),
                    new uint8_t[kBytesPerPixel * size.width() * size.height()],
@@ -35,6 +62,20 @@ BasicDesktopFrame::~BasicDesktopFrame() {
   delete[] data_;
 }
 
+DesktopFrame* BasicDesktopFrame::CopyOf(const DesktopFrame& frame) {
+  DesktopFrame* result = new BasicDesktopFrame(frame.size());
+  for (int y = 0; y < frame.size().height(); ++y) {
+    memcpy(result->data() + y * result->stride(),
+           frame.data() + y * frame.stride(),
+           frame.size().width() * kBytesPerPixel);
+  }
+  result->set_dpi(frame.dpi());
+  result->set_capture_time_ms(frame.capture_time_ms());
+  *result->mutable_updated_region() = frame.updated_region();
+  return result;
+}
+
+
 SharedMemoryDesktopFrame::SharedMemoryDesktopFrame(
     DesktopSize size,
     int stride,
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.h
index a39eff745245..781d1080552d 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.h
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame.h
@@ -14,6 +14,7 @@
 #include "webrtc/modules/desktop_capture/desktop_geometry.h"
 #include "webrtc/modules/desktop_capture/desktop_region.h"
 #include "webrtc/modules/desktop_capture/shared_memory.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
@@ -53,6 +54,19 @@ class DesktopFrame {
   int32_t capture_time_ms() const { return capture_time_ms_; }
   void set_capture_time_ms(int32_t time_ms) { capture_time_ms_ = time_ms; }
 
+  // Optional shape for the frame. Frames may be shaped e.g. if
+  // capturing the contents of a shaped window.
+  const DesktopRegion* shape() const { return shape_.get(); }
+  void set_shape(DesktopRegion* shape) { shape_.reset(shape); }
+
+  // Copies pixels from a buffer or another frame. |dest_rect| must lie within
+  // the bounds of this frame.
+  void CopyPixelsFrom(uint8_t* src_buffer, int src_stride,
+                      const DesktopRect& dest_rect);
+  void CopyPixelsFrom(const DesktopFrame& src_frame,
+                      const DesktopVector& src_pos,
+                      const DesktopRect& dest_rect);
+
 protected:
   DesktopFrame(DesktopSize size,
                int stride,
@@ -69,9 +83,9 @@ class DesktopFrame {
   SharedMemory* const shared_memory_;
 
   DesktopRegion updated_region_;
-
   DesktopVector dpi_;
   int32_t capture_time_ms_;
+  scoped_ptr<DesktopRegion> shape_;
 
  private:
   DISALLOW_COPY_AND_ASSIGN(DesktopFrame);
@@ -83,6 +97,9 @@ class BasicDesktopFrame : public DesktopFrame {
   explicit BasicDesktopFrame(DesktopSize size);
   virtual ~BasicDesktopFrame();
 
+  // Creates a BasicDesktopFrame that contains copy of |frame|.
+  static DesktopFrame* CopyOf(const DesktopFrame& frame);
+
  private:
   DISALLOW_COPY_AND_ASSIGN(BasicDesktopFrame);
 };
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.cc
index 5811a8dac9a0..1ff7c683c798 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.cc
@@ -14,6 +14,16 @@
 
 namespace webrtc {
 
+bool DesktopRect::Contains(const DesktopVector& point) const {
+  return point.x() >= left() && point.x() < right() &&
+         point.y() >= top() && point.y() < bottom();
+}
+
+bool DesktopRect::ContainsRect(const DesktopRect& rect) const {
+  return rect.left() >= left() && rect.right() <= right() &&
+         rect.top() >= top() && rect.bottom() <= bottom();
+}
+
 void DesktopRect::IntersectWith(const DesktopRect& rect) {
   left_ = std::max(left(), rect.left());
   top_ = std::max(top(), rect.top());
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.h
index 2f87cfa06518..e51273d8d21e 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.h
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_geometry.h
@@ -35,6 +35,13 @@ class DesktopVector {
     y_ = y;
   }
 
+  DesktopVector add(const DesktopVector& other) const {
+    return DesktopVector(x() + other.x(), y() + other.y());
+  }
+  DesktopVector subtract(const DesktopVector& other) const {
+    return DesktopVector(x() - other.x(), y() - other.y());
+  }
+
  private:
   int32_t x_;
   int32_t y_;
@@ -84,6 +91,10 @@ class DesktopRect {
                              int32_t right, int32_t bottom) {
     return DesktopRect(left, top, right, bottom);
   }
+  static DesktopRect MakeOriginSize(const DesktopVector& origin,
+                                    const DesktopSize& size) {
+    return MakeXYWH(origin.x(), origin.y(), size.width(), size.height());
+  }
 
   DesktopRect() : left_(0), top_(0), right_(0), bottom_(0) {}
 
@@ -94,6 +105,9 @@ class DesktopRect {
   int32_t width() const { return right_ - left_; }
   int32_t height() const { return bottom_ - top_; }
 
+  DesktopVector top_left() const { return DesktopVector(left_, top_); }
+  DesktopSize size() const { return DesktopSize(width(), height()); }
+
   bool is_empty() const { return left_ >= right_ || top_ >= bottom_; }
 
   bool equals(const DesktopRect& other) const {
@@ -101,11 +115,18 @@ class DesktopRect {
            right_ == other.right_ && bottom_ == other.bottom_;
   }
 
+  // Returns true if |point| lies within the rectangle boundaries.
+  bool Contains(const DesktopVector& point) const;
+
+  // Returns true if |rect| lies within the boundaries of this rectangle.
+  bool ContainsRect(const DesktopRect& rect) const;
+
   // Finds intersection with |rect|.
   void IntersectWith(const DesktopRect& rect);
 
   // Adds (dx, dy) to the position of the rectangle.
   void Translate(int32_t dx, int32_t dy);
+  void Translate(DesktopVector d) { Translate(d.x(), d.y()); }
 
  private:
   DesktopRect(int32_t left, int32_t top, int32_t right, int32_t bottom)
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.h
index 433040a04eed..031d92de2aec 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.h
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.h
@@ -52,6 +52,13 @@ struct MacDesktopConfiguration {
   // increase as you move up the screen) or Carbon-style "top-down" coordinates.
   static MacDesktopConfiguration GetCurrent(Origin origin);
 
+  // Returns true if the given desktop configuration equals this one.
+  bool Equals(const MacDesktopConfiguration& other);
+
+  // Returns the pointer to the display configuration with the specified id.
+  const MacDisplayConfiguration* FindDisplayConfigurationById(
+      CGDirectDisplayID id);
+
   // Bounds of the desktop in Density-Independent Pixels (DIPs).
   DesktopRect bounds;
 
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.mm b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
index a917b5dc0524..838973e42b56 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
@@ -110,15 +110,8 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
     MacDisplayConfiguration display_config =
         GetConfigurationForScreen([screens objectAtIndex: i]);
 
-    // Handling mixed-DPI is hard, so we only return displays that match the
-    // "primary" display's DPI. The primary display is always the first in the
-    // list returned by [NSScreen screens].
-    if (i == 0) {
+    if (i == 0)
       desktop_config.dip_to_pixel_scale = display_config.dip_to_pixel_scale;
-    } else if (desktop_config.dip_to_pixel_scale !=
-               display_config.dip_to_pixel_scale) {
-      continue;
-    }
 
     // Cocoa uses bottom-up coordinates, so if the caller wants top-down then
     // we need to invert the positions of secondary monitors relative to the
@@ -126,8 +119,16 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
     if (i > 0 && origin == TopLeftOrigin) {
       InvertRectYOrigin(desktop_config.displays[0].bounds,
                         &display_config.bounds);
-      InvertRectYOrigin(desktop_config.displays[0].pixel_bounds,
-                        &display_config.pixel_bounds);
+      // |display_bounds| is density dependent, so we need to convert the
+      // primary monitor's position into the secondary monitor's density
+      // context.
+      float scaling_factor = display_config.dip_to_pixel_scale /
+          desktop_config.displays[0].dip_to_pixel_scale;
+      DesktopRect primary_bounds = DesktopRect::MakeLTRB(
+          desktop_config.displays[0].pixel_bounds.left() * scaling_factor,
+          desktop_config.displays[0].pixel_bounds.top() * scaling_factor,
+          desktop_config.displays[0].pixel_bounds.right() * scaling_factor,
+          desktop_config.displays[0].pixel_bounds.bottom() * scaling_factor);
+      InvertRectYOrigin(primary_bounds, &display_config.pixel_bounds);
     }
 
     // Add the display to the configuration.
@@ -143,4 +144,33 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
   return desktop_config;
 }
 
+// For convenience of comparing MacDisplayConfigurations in
+// MacDesktopConfiguration::Equals.
+bool operator==(const MacDisplayConfiguration& left,
+                const MacDisplayConfiguration& right) {
+  return left.id == right.id &&
+      left.bounds.equals(right.bounds) &&
+      left.pixel_bounds.equals(right.pixel_bounds) &&
+      left.dip_to_pixel_scale == right.dip_to_pixel_scale;
+}
+
+bool MacDesktopConfiguration::Equals(const MacDesktopConfiguration& other) {
+  return bounds.equals(other.bounds) &&
+      pixel_bounds.equals(other.pixel_bounds) &&
+      dip_to_pixel_scale == other.dip_to_pixel_scale &&
+      displays == other.displays;
+}
+
+// Finds the display configuration with the specified id.
+const MacDisplayConfiguration*
+MacDesktopConfiguration::FindDisplayConfigurationById(
+    CGDirectDisplayID id) {
+  for (MacDisplayConfigurations::const_iterator it = displays.begin();
+       it != displays.end(); ++it) {
+    if (it->id == id)
+      return &(*it);
+  }
+  return NULL;
+}
+
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc
new file mode 100644
index 000000000000..f0d5c34be650
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+// The amount of time allowed for displays to reconfigure.
+static const int64_t kDisplayConfigurationEventTimeoutMs = 10 * 1000;
+
+DesktopConfigurationMonitor::DesktopConfigurationMonitor()
+    : ref_count_(0),
+      display_configuration_capture_event_(EventWrapper::Create()) {
+  CGError err = CGDisplayRegisterReconfigurationCallback(
+      DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
+  if (err != kCGErrorSuccess) {
+    LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err;
+    abort();
+  }
+  display_configuration_capture_event_->Set();
+
+  desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
+      MacDesktopConfiguration::TopLeftOrigin);
+}
+
+DesktopConfigurationMonitor::~DesktopConfigurationMonitor() {
+  CGError err = CGDisplayRemoveReconfigurationCallback(
+      DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
+  if (err != kCGErrorSuccess)
+    LOG(LS_ERROR) << "CGDisplayRemoveReconfigurationCallback " << err;
+}
+
+void DesktopConfigurationMonitor::Lock() {
+  if (!display_configuration_capture_event_->Wait(
+          kDisplayConfigurationEventTimeoutMs)) {
+    LOG_F(LS_ERROR) << "Event wait timed out.";
+    abort();
+  }
+}
+
+void DesktopConfigurationMonitor::Unlock() {
+  display_configuration_capture_event_->Set();
+}
+
+// static
+void DesktopConfigurationMonitor::DisplaysReconfiguredCallback(
+    CGDirectDisplayID display,
+    CGDisplayChangeSummaryFlags flags,
+    void *user_parameter) {
+  DesktopConfigurationMonitor* monitor =
+      reinterpret_cast<DesktopConfigurationMonitor*>(user_parameter);
+  monitor->DisplaysReconfigured(display, flags);
+}
+
+void DesktopConfigurationMonitor::DisplaysReconfigured(
+    CGDirectDisplayID display,
+    CGDisplayChangeSummaryFlags flags) {
+  if (flags & kCGDisplayBeginConfigurationFlag) {
+    if (reconfiguring_displays_.empty()) {
+      // If this is the first display to start reconfiguring then wait on
+      // |display_configuration_capture_event_| to block the capture thread
+      // from accessing display memory until the reconfiguration completes.
+      if (!display_configuration_capture_event_->Wait(
+              kDisplayConfigurationEventTimeoutMs)) {
+        LOG_F(LS_ERROR) << "Event wait timed out.";
+        abort();
+      }
+    }
+    reconfiguring_displays_.insert(display);
+  } else {
+    reconfiguring_displays_.erase(display);
+    if (reconfiguring_displays_.empty()) {
+      desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
+          MacDesktopConfiguration::TopLeftOrigin);
+      display_configuration_capture_event_->Set();
+    }
+  }
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
new file mode 100644
index 000000000000..27143a84e1eb
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
+
+#include <ApplicationServices/ApplicationServices.h>
+
+#include <set>
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class EventWrapper;
+
+// The class provides functions to synchronize capturing and display
+// reconfiguring across threads, and the up-to-date MacDesktopConfiguration.
+class DesktopConfigurationMonitor {
+ public:
+  DesktopConfigurationMonitor();
+  // Acquires a lock on the current configuration.
+  void Lock();
+  // Releases the lock previously acquired.
+  void Unlock();
+  // Returns the current desktop configuration. Should only be called when the
+  // lock has been acquired.
+  const MacDesktopConfiguration& desktop_configuration() {
+    return desktop_configuration_;
+  }
+
+  void AddRef() { ++ref_count_; }
+  void Release() {
+    if (--ref_count_ == 0)
+      delete this;
+  }
+
+ private:
+  static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
+                                           CGDisplayChangeSummaryFlags flags,
+                                           void *user_parameter);
+  ~DesktopConfigurationMonitor();
+
+  void DisplaysReconfigured(CGDirectDisplayID display,
+                            CGDisplayChangeSummaryFlags flags);
+
+  Atomic32 ref_count_;
+  std::set<CGDirectDisplayID> reconfiguring_displays_;
+  MacDesktopConfiguration desktop_configuration_;
+  scoped_ptr<EventWrapper> display_configuration_capture_event_;
+
+  DISALLOW_COPY_AND_ASSIGN(DesktopConfigurationMonitor);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/osx_version.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/osx_version.cc
new file mode 100644
index 000000000000..7466f20342b6
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/osx_version.cc
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sys/utsname.h>
+
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+namespace {
+
+int GetDarwinVersion() {
+  struct utsname uname_info;
+  if (uname(&uname_info) != 0) {
+    LOG(LS_ERROR) << "uname failed";
+    return 0;
+  }
+
+  if (strcmp(uname_info.sysname, "Darwin") != 0)
+    return 0;
+
+  char* dot;
+  int result = strtol(uname_info.release, &dot, 10);
+  if (*dot != '.') {
+    LOG(LS_ERROR) << "Failed to parse version";
+    return 0;
+  }
+
+  return result;
+}
+
+}  // namespace
+
+bool IsOSLionOrLater() {
+  static int darwin_version = GetDarwinVersion();
+
+  // Verify that the version has been parsed correctly.
+  if (darwin_version < 6) {
+    LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version;
+    abort();
+  }
+
+  // Darwin major version 11 corresponds to OSX 10.7.
+ return darwin_version >= 11; +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/run_tests.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/osx_version.h similarity index 66% rename from media/webrtc/trunk/webrtc/video_engine/test/common/run_tests.cc rename to media/webrtc/trunk/webrtc/modules/desktop_capture/mac/osx_version.h index 4692ba615a33..0ba49a4e69e8 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/run_tests.cc +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mac/osx_version.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,14 +7,10 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "testing/gtest/include/gtest/gtest.h" namespace webrtc { -namespace test { -int RunAllTests() { - return RUN_ALL_TESTS(); -} +// Returns true if the OS version >= OSX 10.7. +bool IsOSLionOrLater(); -} // namespace test } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor.cc new file mode 100644 index 000000000000..07c89f043a22 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/desktop_capture/mouse_cursor.h" + +#include "webrtc/modules/desktop_capture/desktop_frame.h" + +namespace webrtc { + +MouseCursor::MouseCursor() {} + +MouseCursor::MouseCursor(DesktopFrame* image, const DesktopVector& hotspot) + : image_(image), + hotspot_(hotspot) { + assert(0 <= hotspot_.x() && hotspot_.x() <= image_->size().width()); + assert(0 <= hotspot_.y() && hotspot_.y() <= image_->size().height()); +} + +MouseCursor::~MouseCursor() {} + +// static +MouseCursor* MouseCursor::CopyOf(const MouseCursor& cursor) { + return cursor.image() + ? new MouseCursor(BasicDesktopFrame::CopyOf(*cursor.image()), + cursor.hotspot()) + : new MouseCursor(); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor.h new file mode 100644 index 000000000000..3acfa45a3306 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_ +#define WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_ + +#include "webrtc/modules/desktop_capture/desktop_geometry.h" +#include "webrtc/system_wrappers/interface/constructor_magic.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +namespace webrtc { + +class DesktopFrame; + +class MouseCursor { + public: + MouseCursor(); + + // Takes ownership of |image|. |hotspot| must be within |image| boundaries. + MouseCursor(DesktopFrame* image, const DesktopVector& hotspot); + + ~MouseCursor(); + + static MouseCursor* CopyOf(const MouseCursor& cursor); + + void set_image(DesktopFrame* image) { image_.reset(image); } + const DesktopFrame* image() const { return image_.get(); } + + void set_hotspot(const DesktopVector& hotspot ) { hotspot_ = hotspot; } + const DesktopVector& hotspot() const { return hotspot_; } + + private: + scoped_ptr image_; + DesktopVector hotspot_; + + DISALLOW_COPY_AND_ASSIGN(MouseCursor); +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_ diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor.h new file mode 100644 index 000000000000..24dfe72dfa7b --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor.h @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_MONITOR_H_ +#define WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_MONITOR_H_ + +#include "webrtc/modules/desktop_capture/desktop_capture_types.h" +#include "webrtc/modules/desktop_capture/desktop_geometry.h" +#include "webrtc/typedefs.h" + +namespace webrtc { + +class DesktopCaptureOptions; +class DesktopFrame; +class MouseCursor; + +// Captures mouse shape and position. +class MouseCursorMonitor { + public: + enum CursorState { + // Cursor on top of the window including window decorations. + INSIDE, + + // Cursor is outside of the window. + OUTSIDE, + }; + + enum Mode { + // Capture only shape of the mouse cursor, but not position. + SHAPE_ONLY, + + // Capture both, mouse cursor shape and position. + SHAPE_AND_POSITION, + }; + + // Callback interface used to pass current mouse cursor position and shape. + class Callback { + public: + // Called in response to Capture() when the cursor shape has changed. Must + // take ownership of |cursor|. + virtual void OnMouseCursor(MouseCursor* cursor) = 0; + + // Called in response to Capture(). |position| indicates cursor position + // relative to the |window| specified in the constructor. + virtual void OnMouseCursorPosition(CursorState state, + const DesktopVector& position) = 0; + + protected: + virtual ~Callback() {} + }; + + virtual ~MouseCursorMonitor() {} + + // Creates a capturer that notifies of mouse cursor events while the cursor is + // over the specified window. + static MouseCursorMonitor* CreateForWindow( + const DesktopCaptureOptions& options, + WindowId window); + + // Creates a capturer that monitors the mouse cursor shape and position across + // the entire desktop. 
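+  //
+  // A minimal usage sketch (illustrative only; assumes a Callback
+  // implementation named MyCallback, and omits error handling):
+  //
+  //   MyCallback callback;
+  //   scoped_ptr<MouseCursorMonitor> monitor(
+  //       MouseCursorMonitor::CreateForScreen(
+  //           DesktopCaptureOptions::CreateDefault(), kFullDesktopScreenId));
+  //   monitor->Init(&callback, MouseCursorMonitor::SHAPE_AND_POSITION);
+  //   monitor->Capture();  // May invoke OnMouseCursor() and
+  //                        // OnMouseCursorPosition() on |callback|.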
+ // + // TODO(sergeyu): Provide a way to select a specific screen. + static MouseCursorMonitor* CreateForScreen( + const DesktopCaptureOptions& options, + ScreenId screen); + + // Initializes the monitor with the |callback|, which must remain valid until + // capturer is destroyed. + virtual void Init(Callback* callback, Mode mode) = 0; + + // Captures current cursor shape and position (depending on the |mode| passed + // to Init()). Calls Callback::OnMouseCursor() if cursor shape has + // changed since the last call (or when Capture() is called for the first + // time) and then Callback::OnMouseCursorPosition() if mode is set to + // SHAPE_AND_POSITION. + virtual void Capture() = 0; +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_MONITOR_H_ + diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm new file mode 100644 index 000000000000..e88063381975 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm @@ -0,0 +1,279 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h" + +#include +#include +#include +#include + +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" +#include "webrtc/modules/desktop_capture/desktop_frame.h" +#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h" +#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h" +#include "webrtc/modules/desktop_capture/mac/osx_version.h" +#include "webrtc/modules/desktop_capture/mouse_cursor.h" +#include "webrtc/system_wrappers/interface/logging.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/system_wrappers/interface/scoped_refptr.h" + +namespace webrtc { + +class MouseCursorMonitorMac : public MouseCursorMonitor { + public: + MouseCursorMonitorMac(const DesktopCaptureOptions& options, + CGWindowID window_id, + ScreenId screen_id); + virtual ~MouseCursorMonitorMac(); + + virtual void Init(Callback* callback, Mode mode) OVERRIDE; + virtual void Capture() OVERRIDE; + + private: + static void DisplaysReconfiguredCallback(CGDirectDisplayID display, + CGDisplayChangeSummaryFlags flags, + void *user_parameter); + void DisplaysReconfigured(CGDirectDisplayID display, + CGDisplayChangeSummaryFlags flags); + + void CaptureImage(); + + scoped_refptr configuration_monitor_; + CGWindowID window_id_; + ScreenId screen_id_; + Callback* callback_; + Mode mode_; + scoped_ptr last_cursor_; +}; + +MouseCursorMonitorMac::MouseCursorMonitorMac( + const DesktopCaptureOptions& options, + CGWindowID window_id, + ScreenId screen_id) + : configuration_monitor_(options.configuration_monitor()), + window_id_(window_id), + screen_id_(screen_id), + callback_(NULL), + mode_(SHAPE_AND_POSITION) { + assert(window_id == kCGNullWindowID || screen_id == kInvalidScreenId); + if (screen_id != kInvalidScreenId && !IsOSLionOrLater()) { + // Single screen capture is not supported on pre OS X 10.7. 
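+    // e.g. a request to monitor a specific display on OS X 10.6 silently
+    // falls back to tracking the cursor across the full desktop.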
+    screen_id_ = kFullDesktopScreenId;
+  }
+}
+
+MouseCursorMonitorMac::~MouseCursorMonitorMac() {}
+
+void MouseCursorMonitorMac::Init(Callback* callback, Mode mode) {
+  assert(!callback_);
+  assert(callback);
+
+  callback_ = callback;
+  mode_ = mode;
+}
+
+void MouseCursorMonitorMac::Capture() {
+  assert(callback_);
+
+  CaptureImage();
+
+  if (mode_ != SHAPE_AND_POSITION)
+    return;
+
+  CursorState state = INSIDE;
+
+  CGEventRef event = CGEventCreate(NULL);
+  CGPoint gc_position = CGEventGetLocation(event);
+  CFRelease(event);
+
+  DesktopVector position(gc_position.x, gc_position.y);
+
+  configuration_monitor_->Lock();
+  MacDesktopConfiguration configuration =
+      configuration_monitor_->desktop_configuration();
+  configuration_monitor_->Unlock();
+  float scale = 1.0f;
+
+  // Find the dpi to physical pixel scale for the screen where the mouse
+  // cursor is.
+  for (MacDisplayConfigurations::iterator it = configuration.displays.begin();
+       it != configuration.displays.end(); ++it) {
+    if (it->bounds.Contains(position)) {
+      scale = it->dip_to_pixel_scale;
+      break;
+    }
+  }
+  // If we are capturing cursor for a specific window then we need to figure
+  // out if the current mouse position is covered by another window and also
+  // adjust |position| to make it relative to the window origin.
+  if (window_id_ != kCGNullWindowID) {
+    // Get list of windows that may be covering parts of |window_id_|.
+    // CGWindowListCopyWindowInfo() returns windows in order from front to
+    // back, so |window_id_| is expected to be the last in the list.
+    CFArrayRef window_array =
+        CGWindowListCopyWindowInfo(kCGWindowListOptionOnScreenOnly |
+                                       kCGWindowListOptionOnScreenAboveWindow |
+                                       kCGWindowListOptionIncludingWindow,
+                                   window_id_);
+    bool found_window = false;
+    if (window_array) {
+      CFIndex count = CFArrayGetCount(window_array);
+      for (CFIndex i = 0; i < count; ++i) {
+        CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+            CFArrayGetValueAtIndex(window_array, i));
+
+        // Skip the Dock window. Dock window covers the whole screen, but it
+        // is transparent.
+        CFStringRef window_name = reinterpret_cast<CFStringRef>(
+            CFDictionaryGetValue(window, kCGWindowName));
+        if (window_name && CFStringCompare(window_name, CFSTR("Dock"), 0) == 0)
+          continue;
+
+        CFDictionaryRef window_bounds = reinterpret_cast<CFDictionaryRef>(
+            CFDictionaryGetValue(window, kCGWindowBounds));
+        CFNumberRef window_number = reinterpret_cast<CFNumberRef>(
+            CFDictionaryGetValue(window, kCGWindowNumber));
+
+        if (window_bounds && window_number) {
+          CGRect gc_window_rect;
+          if (!CGRectMakeWithDictionaryRepresentation(window_bounds,
+                                                      &gc_window_rect)) {
+            continue;
+          }
+          DesktopRect window_rect =
+              DesktopRect::MakeXYWH(gc_window_rect.origin.x,
+                                    gc_window_rect.origin.y,
+                                    gc_window_rect.size.width,
+                                    gc_window_rect.size.height);
+
+          CGWindowID window_id;
+          if (!CFNumberGetValue(window_number, kCFNumberIntType, &window_id))
+            continue;
+
+          if (window_id == window_id_) {
+            found_window = true;
+            if (!window_rect.Contains(position))
+              state = OUTSIDE;
+            position = position.subtract(window_rect.top_left());
+
+            assert(i == count - 1);
+            break;
+          } else if (window_rect.Contains(position)) {
+            state = OUTSIDE;
+            position.set(-1, -1);
+            break;
+          }
+        }
+      }
+      CFRelease(window_array);
+    }
+    if (!found_window) {
+      // If we failed to get list of windows or the window wasn't in the list
+      // pretend that the cursor is outside the window. This can happen, e.g.
+      // if the window was closed.
+      state = OUTSIDE;
+      position.set(-1, -1);
+    }
+  } else {
+    assert(screen_id_ >= kFullDesktopScreenId);
+    if (screen_id_ != kFullDesktopScreenId) {
+      // For single screen capturing, convert the position to relative to the
+      // target screen.
+      const MacDisplayConfiguration* config =
+          configuration.FindDisplayConfigurationById(
+              static_cast<CGDirectDisplayID>(screen_id_));
+      if (config) {
+        if (!config->pixel_bounds.Contains(position))
+          state = OUTSIDE;
+        position = position.subtract(config->bounds.top_left());
+      } else {
+        // The target screen is no longer valid.
+        state = OUTSIDE;
+        position.set(-1, -1);
+      }
+    } else {
+      position = position.subtract(configuration.bounds.top_left());
+    }
+  }
+  if (state == INSIDE) {
+    // Convert Density Independent Pixel to physical pixel.
+    position = DesktopVector(round(position.x() * scale),
+                             round(position.y() * scale));
+  }
+  callback_->OnMouseCursorPosition(state, position);
+}
+
+void MouseCursorMonitorMac::CaptureImage() {
+  NSCursor* nscursor = [NSCursor currentSystemCursor];
+
+  NSImage* nsimage = [nscursor image];
+  NSSize nssize = [nsimage size];
+  DesktopSize size(nssize.width, nssize.height);
+  NSPoint nshotspot = [nscursor hotSpot];
+  DesktopVector hotspot(
+      std::max(0, std::min(size.width(), static_cast<int>(nshotspot.x))),
+      std::max(0, std::min(size.height(), static_cast<int>(nshotspot.y))));
+  CGImageRef cg_image =
+      [nsimage CGImageForProposedRect:NULL context:nil hints:nil];
+  if (!cg_image)
+    return;
+
+  if (CGImageGetBitsPerPixel(cg_image) != DesktopFrame::kBytesPerPixel * 8 ||
+      CGImageGetBytesPerRow(cg_image) !=
+          static_cast<size_t>(DesktopFrame::kBytesPerPixel * size.width()) ||
+      CGImageGetBitsPerComponent(cg_image) != 8) {
+    return;
+  }
+
+  CGDataProviderRef provider = CGImageGetDataProvider(cg_image);
+  CFDataRef image_data_ref = CGDataProviderCopyData(provider);
+  if (image_data_ref == NULL)
+    return;
+
+  const uint8_t* src_data =
+      reinterpret_cast<const uint8_t*>(CFDataGetBytePtr(image_data_ref));
+
+  // Compare the cursor with the previous one.
+  if (last_cursor_.get() &&
+      last_cursor_->image()->size().equals(size) &&
+      last_cursor_->hotspot().equals(hotspot) &&
+      memcmp(last_cursor_->image()->data(), src_data,
+             last_cursor_->image()->stride() * size.height()) == 0) {
+    return;
+  }
+
+  // Create a MouseCursor that describes the cursor and pass it to
+  // the client.
+  scoped_ptr<DesktopFrame> image(
+      new BasicDesktopFrame(DesktopSize(size.width(), size.height())));
+  memcpy(image->data(), src_data,
+         size.width() * size.height() * DesktopFrame::kBytesPerPixel);
+
+  CFRelease(image_data_ref);
+
+  scoped_ptr<MouseCursor> cursor(new MouseCursor(image.release(), hotspot));
+  last_cursor_.reset(MouseCursor::CopyOf(*cursor));
+
+  callback_->OnMouseCursor(cursor.release());
+}
+
+MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
+    const DesktopCaptureOptions& options, WindowId window) {
+  return new MouseCursorMonitorMac(options, window, kInvalidScreenId);
+}
+
+MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
+    const DesktopCaptureOptions& options,
+    ScreenId screen) {
+  return new MouseCursorMonitorMac(options, kCGNullWindowID, screen);
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
new file mode 100644
index 000000000000..94a20e113ff4
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h" + +#include + +namespace webrtc { + +MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( + const DesktopCaptureOptions& options, + WindowId window) { + return NULL; +} + +MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( + const DesktopCaptureOptions& options, + ScreenId screen) { + return NULL; +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc new file mode 100644 index 000000000000..7849005f9001 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h" + +#include "gtest/gtest.h" +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" +#include "webrtc/modules/desktop_capture/desktop_frame.h" +#include "webrtc/modules/desktop_capture/mouse_cursor.h" +#include "webrtc/modules/desktop_capture/window_capturer.h" +#include "webrtc/system_wrappers/interface/logging.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +namespace webrtc { + +class MouseCursorMonitorTest : public testing::Test, + public MouseCursorMonitor::Callback { + public: + MouseCursorMonitorTest() + : position_received_(false) { + } + + // MouseCursorMonitor::Callback interface + virtual void OnMouseCursor(MouseCursor* cursor_image) OVERRIDE { + cursor_image_.reset(cursor_image); + } + + virtual void OnMouseCursorPosition(MouseCursorMonitor::CursorState state, + const DesktopVector& position) OVERRIDE { + state_ = state; + position_ = position; + position_received_ = true; + } + + protected: + scoped_ptr cursor_image_; + MouseCursorMonitor::CursorState state_; + DesktopVector position_; + bool position_received_; +}; + +// TODO(sergeyu): On Mac we need to initialize NSApplication before running the +// tests. Figure out how to do that without breaking other tests in +// modules_unittests and enable these tests on Mac. 
+// https://code.google.com/p/webrtc/issues/detail?id=2532
+#if !defined(WEBRTC_MAC)
+#define MAYBE(x) x
+#else
+#define MAYBE(x) DISABLED_##x
+#endif
+
+TEST_F(MouseCursorMonitorTest, MAYBE(FromScreen)) {
+  scoped_ptr<MouseCursorMonitor> capturer(MouseCursorMonitor::CreateForScreen(
+      DesktopCaptureOptions::CreateDefault(), webrtc::kFullDesktopScreenId));
+  assert(capturer.get());
+  capturer->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
+  capturer->Capture();
+
+  EXPECT_TRUE(cursor_image_.get());
+  EXPECT_GE(cursor_image_->hotspot().x(), 0);
+  EXPECT_LE(cursor_image_->hotspot().x(),
+            cursor_image_->image()->size().width());
+  EXPECT_GE(cursor_image_->hotspot().y(), 0);
+  EXPECT_LE(cursor_image_->hotspot().y(),
+            cursor_image_->image()->size().height());
+
+  EXPECT_TRUE(position_received_);
+  EXPECT_EQ(MouseCursorMonitor::INSIDE, state_);
+}
+
+TEST_F(MouseCursorMonitorTest, MAYBE(FromWindow)) {
+  DesktopCaptureOptions options = DesktopCaptureOptions::CreateDefault();
+
+  // First get list of windows.
+  scoped_ptr<WindowCapturer> window_capturer(WindowCapturer::Create(options));
+
+  // If window capturing is not supported then skip this test.
+  if (!window_capturer.get())
+    return;
+
+  WindowCapturer::WindowList windows;
+  EXPECT_TRUE(window_capturer->GetWindowList(&windows));
+
+  // Iterate over all windows and try capturing mouse cursor for each of them.
+  for (size_t i = 0; i < windows.size(); ++i) {
+    cursor_image_.reset();
+    position_received_ = false;
+
+    scoped_ptr<MouseCursorMonitor> capturer(
+        MouseCursorMonitor::CreateForWindow(
+            DesktopCaptureOptions::CreateDefault(), windows[i].id));
+    assert(capturer.get());
+
+    capturer->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
+    capturer->Capture();
+
+    EXPECT_TRUE(cursor_image_.get());
+    EXPECT_TRUE(position_received_);
+  }
+}
+
+// Make sure that OnMouseCursorPosition() is not called in the SHAPE_ONLY mode.
+TEST_F(MouseCursorMonitorTest, MAYBE(ShapeOnly)) {
+  scoped_ptr<MouseCursorMonitor> capturer(MouseCursorMonitor::CreateForScreen(
+      DesktopCaptureOptions::CreateDefault(), webrtc::kFullDesktopScreenId));
+  assert(capturer.get());
+  capturer->Init(this, MouseCursorMonitor::SHAPE_ONLY);
+  capturer->Capture();
+
+  EXPECT_TRUE(cursor_image_.get());
+  EXPECT_FALSE(position_received_);
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
new file mode 100644
index 000000000000..fc041ef51701
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h" + +#include "webrtc/modules/desktop_capture/desktop_frame.h" +#include "webrtc/modules/desktop_capture/mouse_cursor.h" +#include "webrtc/modules/desktop_capture/win/cursor.h" +#include "webrtc/system_wrappers/interface/logging.h" + +namespace webrtc { + +class MouseCursorMonitorWin : public MouseCursorMonitor { + public: + explicit MouseCursorMonitorWin(HWND window); + explicit MouseCursorMonitorWin(ScreenId screen); + virtual ~MouseCursorMonitorWin(); + + virtual void Init(Callback* callback, Mode mode) OVERRIDE; + virtual void Capture() OVERRIDE; + + private: + DesktopRect GetScreenRect(); + + HWND window_; + ScreenId screen_; + + Callback* callback_; + Mode mode_; + + HDC desktop_dc_; + + HCURSOR last_cursor_; +}; + +MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window) + : window_(window), + screen_(kInvalidScreenId), + callback_(NULL), + mode_(SHAPE_AND_POSITION), + desktop_dc_(NULL), + last_cursor_(NULL) { +} + +MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen) + : window_(NULL), + screen_(screen), + callback_(NULL), + mode_(SHAPE_AND_POSITION), + desktop_dc_(NULL), + last_cursor_(NULL) { + assert(screen >= kFullDesktopScreenId); +} + +MouseCursorMonitorWin::~MouseCursorMonitorWin() { + if (desktop_dc_) + ReleaseDC(NULL, desktop_dc_); +} + +void MouseCursorMonitorWin::Init(Callback* callback, Mode mode) { + assert(!callback_); + assert(callback); + + callback_ = callback; + mode_ = mode; + + desktop_dc_ = GetDC(NULL); +} + +void MouseCursorMonitorWin::Capture() { + assert(callback_); + + CURSORINFO cursor_info; + cursor_info.cbSize = sizeof(CURSORINFO); + if (!GetCursorInfo(&cursor_info)) { + LOG_F(LS_ERROR) << "Unable to get cursor info. Error = " << GetLastError(); + return; + } + + if (last_cursor_ != cursor_info.hCursor) { + last_cursor_ = cursor_info.hCursor; + // Note that |cursor_info.hCursor| does not need to be freed. + scoped_ptr cursor( + CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor)); + if (cursor.get()) + callback_->OnMouseCursor(cursor.release()); + } + + if (mode_ != SHAPE_AND_POSITION) + return; + + DesktopVector position(cursor_info.ptScreenPos.x, cursor_info.ptScreenPos.y); + bool inside = cursor_info.flags == CURSOR_SHOWING; + + if (window_) { + RECT rect; + if (!GetWindowRect(window_, &rect)) { + position.set(0, 0); + inside = false; + } else { + if (inside) { + HWND windowUnderCursor = WindowFromPoint(cursor_info.ptScreenPos); + inside = windowUnderCursor ? + (window_ == GetAncestor(windowUnderCursor, GA_ROOT)) : false; + } + position = position.subtract(DesktopVector(rect.left, rect.top)); + } + } else { + assert(screen_ != kInvalidScreenId); + DesktopRect rect = GetScreenRect(); + if (inside) + inside = rect.Contains(position); + position = position.subtract(rect.top_left()); + } + + callback_->OnMouseCursorPosition(inside ? 
INSIDE : OUTSIDE, position); +} + +DesktopRect MouseCursorMonitorWin::GetScreenRect() { + assert(screen_ != kInvalidScreenId); + if (screen_ == kFullDesktopScreenId) { + return DesktopRect::MakeXYWH( + GetSystemMetrics(SM_XVIRTUALSCREEN), + GetSystemMetrics(SM_YVIRTUALSCREEN), + GetSystemMetrics(SM_CXVIRTUALSCREEN), + GetSystemMetrics(SM_CYVIRTUALSCREEN)); + } + DISPLAY_DEVICE device; + device.cb = sizeof(device); + BOOL result = EnumDisplayDevices(NULL, screen_, &device, 0); + if (!result) + return DesktopRect(); + + DEVMODE device_mode; + device_mode.dmSize = sizeof(device_mode); + device_mode.dmDriverExtra = 0; + result = EnumDisplaySettingsEx( + device.DeviceName, ENUM_CURRENT_SETTINGS, &device_mode, 0); + if (!result) + return DesktopRect(); + + return DesktopRect::MakeXYWH( + GetSystemMetrics(SM_XVIRTUALSCREEN) + device_mode.dmPosition.x, + GetSystemMetrics(SM_YVIRTUALSCREEN) + device_mode.dmPosition.y, + device_mode.dmPelsWidth, + device_mode.dmPelsHeight); +} + +MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( + const DesktopCaptureOptions& options, WindowId window) { + return new MouseCursorMonitorWin(reinterpret_cast(window)); +} + +MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( + const DesktopCaptureOptions& options, + ScreenId screen) { + return new MouseCursorMonitorWin(screen); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc new file mode 100644 index 000000000000..9e196779d242 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc @@ -0,0 +1,225 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h" + +#include +#include +#include + +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" +#include "webrtc/modules/desktop_capture/desktop_frame.h" +#include "webrtc/modules/desktop_capture/mouse_cursor.h" +#include "webrtc/system_wrappers/interface/logging.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +namespace { + +// WindowCapturer returns window IDs of X11 windows with WM_STATE attribute. +// These windows may not be immediate children of the root window, because +// window managers may re-parent them to add decorations. However, +// XQueryPointer() expects to be passed children of the root. This function +// searches up the list of the windows to find the root child that corresponds +// to |window|. +Window GetTopLevelWindow(Display* display, Window window) { + while (true) { + // If the window is in WithdrawnState then look at all of its children. 
+ ::Window root, parent; + ::Window *children; + unsigned int num_children; + if (!XQueryTree(display, window, &root, &parent, &children, + &num_children)) { + LOG(LS_ERROR) << "Failed to query for child windows although window" + << "does not have a valid WM_STATE."; + return None; + } + if (children) + XFree(children); + + if (parent == root) + break; + + window = parent; + } + + return window; +} + +} // namespace + +namespace webrtc { + +class MouseCursorMonitorX11 : public MouseCursorMonitor, + public SharedXDisplay::XEventHandler { + public: + MouseCursorMonitorX11(const DesktopCaptureOptions& options, Window window); + virtual ~MouseCursorMonitorX11(); + + virtual void Init(Callback* callback, Mode mode) OVERRIDE; + virtual void Capture() OVERRIDE; + + private: + // SharedXDisplay::XEventHandler interface. + virtual bool HandleXEvent(const XEvent& event) OVERRIDE; + + Display* display() { return x_display_->display(); } + + // Captures current cursor shape and stores it in |cursor_shape_|. + void CaptureCursor(); + + scoped_refptr x_display_; + Callback* callback_; + Mode mode_; + Window window_; + + bool have_xfixes_; + int xfixes_event_base_; + int xfixes_error_base_; + + scoped_ptr cursor_shape_; +}; + +MouseCursorMonitorX11::MouseCursorMonitorX11( + const DesktopCaptureOptions& options, + Window window) + : x_display_(options.x_display()), + callback_(NULL), + mode_(SHAPE_AND_POSITION), + window_(window), + have_xfixes_(false), + xfixes_event_base_(-1), + xfixes_error_base_(-1) {} + +MouseCursorMonitorX11::~MouseCursorMonitorX11() { + if (have_xfixes_) { + x_display_->RemoveEventHandler(xfixes_event_base_ + XFixesCursorNotify, + this); + } +} + +void MouseCursorMonitorX11::Init(Callback* callback, Mode mode) { + // Init can be called only once per instance of MouseCursorMonitor. + assert(!callback_); + assert(callback); + + callback_ = callback; + mode_ = mode; + + have_xfixes_ = + XFixesQueryExtension(display(), &xfixes_event_base_, &xfixes_error_base_); + + if (have_xfixes_) { + // Register for changes to the cursor shape. + XFixesSelectCursorInput(display(), window_, XFixesDisplayCursorNotifyMask); + x_display_->AddEventHandler(xfixes_event_base_ + XFixesCursorNotify, this); + + CaptureCursor(); + } else { + LOG(LS_INFO) << "X server does not support XFixes."; + } +} + +void MouseCursorMonitorX11::Capture() { + assert(callback_); + + // Process X11 events in case XFixes has sent cursor notification. + x_display_->ProcessPendingXEvents(); + + // cursor_shape_| is set only if we were notified of a cursor shape change. + if (cursor_shape_.get()) + callback_->OnMouseCursor(cursor_shape_.release()); + + // Get cursor position if necessary. + if (mode_ == SHAPE_AND_POSITION) { + int root_x; + int root_y; + int win_x; + int win_y; + Window root_window; + Window child_window; + unsigned int mask; + Bool result = XQueryPointer(display(), window_, &root_window, &child_window, + &root_x, &root_y, &win_x, &win_y, &mask); + CursorState state; + if (!result) { + state = OUTSIDE; + } else { + // In screen mode (window_ == root_window) the mouse is always inside. + // XQueryPointer() sets |child_window| to None if the cursor is outside + // |window_|. + state = + (window_ == root_window || child_window != None) ? 
INSIDE : OUTSIDE; + } + + callback_->OnMouseCursorPosition(state, + webrtc::DesktopVector(win_x, win_y)); + } +} + +bool MouseCursorMonitorX11::HandleXEvent(const XEvent& event) { + if (have_xfixes_ && event.type == xfixes_event_base_ + XFixesCursorNotify) { + const XFixesCursorNotifyEvent* cursor_event = + reinterpret_cast(&event); + if (cursor_event->subtype == XFixesDisplayCursorNotify) { + CaptureCursor(); + } + // Return false, even if the event has been handled, because there might be + // other listeners for cursor notifications. + } + return false; +} + +void MouseCursorMonitorX11::CaptureCursor() { + assert(have_xfixes_); + + XFixesCursorImage* img = XFixesGetCursorImage(display()); + if (!img) + return; + + scoped_ptr image( + new BasicDesktopFrame(DesktopSize(img->width, img->height))); + + // Xlib stores 32-bit data in longs, even if longs are 64-bits long. + unsigned long* src = img->pixels; + uint32_t* dst = reinterpret_cast(image->data()); + uint32_t* dst_end = dst + (img->width * img->height); + while (dst < dst_end) { + *dst++ = static_cast(*src++); + } + + DesktopVector hotspot(std::min(img->width, img->xhot), + std::min(img->height, img->yhot)); + + XFree(img); + + cursor_shape_.reset(new MouseCursor(image.release(), hotspot)); +} + +// static +MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( + const DesktopCaptureOptions& options, WindowId window) { + if (!options.x_display()) + return NULL; + window = GetTopLevelWindow(options.x_display()->display(), window); + if (window == None) + return NULL; + return new MouseCursorMonitorX11(options, window); +} + +MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( + const DesktopCaptureOptions& options, + ScreenId screen) { + if (!options.x_display()) + return NULL; + return new MouseCursorMonitorX11( + options, DefaultRootWindow(options.x_display()->display())); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_shape.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_shape.h index 36ab120e0f32..e759cf2edc59 100644 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_shape.h +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_shape.h @@ -18,6 +18,8 @@ namespace webrtc { // Type used to return mouse cursor shape from video capturers. +// +// TODO(sergeyu): Remove this type and use MouseCursor instead. struct MouseCursorShape { // Size of the cursor in screen pixels. DesktopSize size; diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.cc new file mode 100644 index 000000000000..97f69d3baff1 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+
+namespace webrtc {
+
+ScreenCapturer* ScreenCapturer::Create() {
+  return Create(DesktopCaptureOptions::CreateDefault());
+}
+
+#if defined(WEBRTC_LINUX)
+ScreenCapturer* ScreenCapturer::CreateWithXDamage(
+    bool use_update_notifications) {
+  DesktopCaptureOptions options;
+  options.set_use_update_notifications(use_update_notifications);
+  return Create(options);
+}
+#elif defined(WEBRTC_WIN)
+ScreenCapturer* ScreenCapturer::CreateWithDisableAero(bool disable_effects) {
+  DesktopCaptureOptions options;
+  options.set_disable_effects(disable_effects);
+  return Create(options);
+}
+#endif
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.h
index 17101c5221b8..a8d40a72ef83 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.h
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer.h
@@ -11,12 +11,16 @@
 #ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_H_
 #define WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_H_
 
+#include <vector>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
 #include "webrtc/modules/desktop_capture/desktop_capturer.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
 
+class DesktopCaptureOptions;
 struct MouseCursorShape;
 
 // Class used to capture video frames asynchronously.
@@ -39,6 +43,13 @@ struct MouseCursorShape;
 // Since data can be read while another capture action is happening.
 class ScreenCapturer : public DesktopCapturer {
  public:
+  // Use a struct to represent a screen although it has only an id for now,
+  // because we may want to add more fields (e.g. description) in the future.
+  struct Screen {
+    ScreenId id;
+  };
+  typedef std::vector<Screen> ScreenList;
+
   // Provides callbacks used by the capturer to pass captured video frames and
   // mouse cursor shapes to the processing pipeline.
   //
@@ -57,6 +68,10 @@ class ScreenCapturer : public DesktopCapturer {
   virtual ~ScreenCapturer() {}
 
   // Creates platform-specific capturer.
+  //
+  // TODO(sergeyu): Remove all Create() methods except the first one.
+  // crbug.com/172183
+  static ScreenCapturer* Create(const DesktopCaptureOptions& options);
   static ScreenCapturer* Create();
 
 #if defined(WEBRTC_LINUX)
@@ -73,6 +88,15 @@ class ScreenCapturer : public DesktopCapturer {
   // remain valid until the capturer is destroyed.
   virtual void SetMouseShapeObserver(
       MouseShapeObserver* mouse_shape_observer) = 0;
+
+  // Get the list of screens (not containing kFullDesktopScreenId). Returns
+  // false in case of a failure.
+  virtual bool GetScreenList(ScreenList* screens) = 0;
+
+  // Select the screen to be captured. Returns false in case of a failure (e.g.
+  // if there is no screen with the specified id). If this is never called, the
+  // full desktop is captured.
+ virtual bool SelectScreen(ScreenId id) = 0; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mac.mm b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mac.mm index 3165fd3aabad..100309f896c2 100644 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mac.mm +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mac.mm @@ -19,17 +19,18 @@ #include #include #include -#include +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" #include "webrtc/modules/desktop_capture/desktop_frame.h" #include "webrtc/modules/desktop_capture/desktop_geometry.h" #include "webrtc/modules/desktop_capture/desktop_region.h" #include "webrtc/modules/desktop_capture/mac/desktop_configuration.h" +#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h" +#include "webrtc/modules/desktop_capture/mac/osx_version.h" #include "webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h" #include "webrtc/modules/desktop_capture/mouse_cursor_shape.h" #include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h" #include "webrtc/modules/desktop_capture/screen_capturer_helper.h" -#include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/logging.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" #include "webrtc/system_wrappers/interface/tick_util.h" @@ -86,46 +87,11 @@ void CopyRect(const uint8_t* src_plane, } } -int GetDarwinVersion() { - struct utsname uname_info; - if (uname(&uname_info) != 0) { - LOG(LS_ERROR) << "uname failed"; - return 0; - } - - if (strcmp(uname_info.sysname, "Darwin") != 0) - return 0; - - char* dot; - int result = strtol(uname_info.release, &dot, 10); - if (*dot != '.') { - LOG(LS_ERROR) << "Failed to parse version"; - return 0; - } - - return result; -} - -bool IsOSLionOrLater() { - static int darwin_version = GetDarwinVersion(); - - // Verify that the version has been parsed correctly. - if (darwin_version < 6) { - LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version; - abort(); - } - - // Darwin major version 11 corresponds to OSX 10.7. - return darwin_version >= 11; -} - -// The amount of time allowed for displays to reconfigure. -const int64_t kDisplayConfigurationEventTimeoutMs = 10 * 1000; - // A class to perform video frame capturing for mac. class ScreenCapturerMac : public ScreenCapturer { public: - ScreenCapturerMac(); + explicit ScreenCapturerMac( + scoped_refptr desktop_config_monitor); virtual ~ScreenCapturerMac(); bool Init(); @@ -135,6 +101,8 @@ class ScreenCapturerMac : public ScreenCapturer { virtual void Capture(const DesktopRegion& region) OVERRIDE; virtual void SetMouseShapeObserver( MouseShapeObserver* mouse_shape_observer) OVERRIDE; + virtual bool GetScreenList(ScreenList* screens) OVERRIDE; + virtual bool SelectScreen(ScreenId id) OVERRIDE; private: void CaptureCursor(); @@ -144,7 +112,8 @@ class ScreenCapturerMac : public ScreenCapturer { void GlBlitSlow(const DesktopFrame& frame); void CgBlitPreLion(const DesktopFrame& frame, const DesktopRegion& region); - void CgBlitPostLion(const DesktopFrame& frame, + // Returns false if the selected screen is no longer valid. + bool CgBlitPostLion(const DesktopFrame& frame, const DesktopRegion& region); // Called when the screen configuration is changed. 
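 // A sketch of how a client might drive the screen-selection API declared in
 // screen_capturer.h above (illustrative only; assumes |capturer| was created
 // with ScreenCapturer::Create(options) and already started):
 //
 //   ScreenCapturer::ScreenList screens;
 //   if (capturer->GetScreenList(&screens)) {
 //     for (size_t i = 0; i < screens.size(); ++i) {
 //       if (capturer->SelectScreen(screens[i].id))
 //         capturer->Capture(DesktopRegion());
 //     }
 //   }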
@@ -157,8 +126,6 @@ class ScreenCapturerMac : public ScreenCapturer { void ScreenUpdateMove(CGScreenUpdateMoveDelta delta, size_t count, const CGRect *rect_array); - void DisplaysReconfigured(CGDirectDisplayID display, - CGDisplayChangeSummaryFlags flags); static void ScreenRefreshCallback(CGRectCount count, const CGRect *rect_array, void *user_parameter); @@ -166,12 +133,10 @@ class ScreenCapturerMac : public ScreenCapturer { size_t count, const CGRect *rect_array, void *user_parameter); - static void DisplaysReconfiguredCallback(CGDirectDisplayID display, - CGDisplayChangeSummaryFlags flags, - void *user_parameter); - void ReleaseBuffers(); + DesktopFrame* CreateFrame(); + Callback* callback_; MouseShapeObserver* mouse_shape_observer_; @@ -184,6 +149,16 @@ class ScreenCapturerMac : public ScreenCapturer { // Current display configuration. MacDesktopConfiguration desktop_config_; + // Currently selected display, or 0 if the full desktop is selected. On OS X + // 10.6 and before, this is always 0. + CGDirectDisplayID current_display_; + + // The physical pixel bounds of the current screen. + DesktopRect screen_pixel_bounds_; + + // The dip to physical pixel scale of the current screen. + float dip_to_pixel_scale_; + // A thread-safe list of invalid rectangles, and the size of the most // recently captured screen. ScreenCapturerHelper helper_; @@ -194,13 +169,8 @@ class ScreenCapturerMac : public ScreenCapturer { // Contains an invalid region from the previous capture. DesktopRegion last_invalid_region_; - // Used to ensure that frame captures do not take place while displays - // are being reconfigured. - scoped_ptr display_configuration_capture_event_; - - // Records the Ids of attached displays which are being reconfigured. - // Accessed on the thread on which we are notified of display events. - std::set reconfiguring_displays_; + // Monitoring display reconfiguration. + scoped_refptr desktop_config_monitor_; // Power management assertion to prevent the screen from sleeping. 
IOPMAssertionID power_assertion_id_display_; @@ -242,24 +212,14 @@ class InvertedDesktopFrame : public DesktopFrame { DISALLOW_COPY_AND_ASSIGN(InvertedDesktopFrame); }; -DesktopFrame* CreateFrame( - const MacDesktopConfiguration& desktop_config) { - - DesktopSize size(desktop_config.pixel_bounds.width(), - desktop_config.pixel_bounds.height()); - scoped_ptr frame(new BasicDesktopFrame(size)); - - frame->set_dpi(DesktopVector( - kStandardDPI * desktop_config.dip_to_pixel_scale, - kStandardDPI * desktop_config.dip_to_pixel_scale)); - return frame.release(); -} - -ScreenCapturerMac::ScreenCapturerMac() +ScreenCapturerMac::ScreenCapturerMac( + scoped_refptr desktop_config_monitor) : callback_(NULL), mouse_shape_observer_(NULL), cgl_context_(NULL), - display_configuration_capture_event_(EventWrapper::Create()), + current_display_(0), + dip_to_pixel_scale_(1.0f), + desktop_config_monitor_(desktop_config_monitor), power_assertion_id_display_(kIOPMNullAssertionID), power_assertion_id_user_(kIOPMNullAssertionID), app_services_library_(NULL), @@ -268,7 +228,6 @@ ScreenCapturerMac::ScreenCapturerMac() cg_display_bits_per_pixel_(NULL), opengl_library_(NULL), cgl_set_full_screen_(NULL) { - display_configuration_capture_event_->Set(); } ScreenCapturerMac::~ScreenCapturerMac() { @@ -283,11 +242,6 @@ ScreenCapturerMac::~ScreenCapturerMac() { ReleaseBuffers(); UnregisterRefreshAndMoveHandlers(); - CGError err = CGDisplayRemoveReconfigurationCallback( - ScreenCapturerMac::DisplaysReconfiguredCallback, this); - if (err != kCGErrorSuccess) - LOG(LS_ERROR) << "CGDisplayRemoveReconfigurationCallback " << err; - dlclose(app_services_library_); dlclose(opengl_library_); } @@ -296,14 +250,9 @@ bool ScreenCapturerMac::Init() { if (!RegisterRefreshAndMoveHandlers()) { return false; } - - CGError err = CGDisplayRegisterReconfigurationCallback( - ScreenCapturerMac::DisplaysReconfiguredCallback, this); - if (err != kCGErrorSuccess) { - LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err; - return false; - } - + desktop_config_monitor_->Lock(); + desktop_config_ = desktop_config_monitor_->desktop_configuration(); + desktop_config_monitor_->Unlock(); ScreenConfigurationChanged(); return true; } @@ -348,14 +297,17 @@ void ScreenCapturerMac::Capture( queue_.MoveToNextFrame(); - // Wait until the display configuration is stable. If one or more displays - // are reconfiguring then |display_configuration_capture_event_| will not be - // set until the reconfiguration completes. - // TODO(wez): Replace this with an early-exit (See crbug.com/104542). - if (!display_configuration_capture_event_->Wait( - kDisplayConfigurationEventTimeoutMs)) { - LOG_F(LS_ERROR) << "Event wait timed out."; - abort(); + desktop_config_monitor_->Lock(); + MacDesktopConfiguration new_config = + desktop_config_monitor_->desktop_configuration(); + if (!desktop_config_.Equals(new_config)) { + desktop_config_ = new_config; + // If the display configuraiton has changed then refresh capturer data + // structures. Occasionally, the refresh and move handlers are lost when + // the screen mode changes, so re-register them here. + UnregisterRefreshAndMoveHandlers(); + RegisterRefreshAndMoveHandlers(); + ScreenConfigurationChanged(); } DesktopRegion region; @@ -365,7 +317,7 @@ void ScreenCapturerMac::Capture( // Note that we can't reallocate other buffers at this point, since the caller // may still be reading from them. 
if (!queue_.current_frame()) - queue_.ReplaceCurrentFrame(CreateFrame(desktop_config_)); + queue_.ReplaceCurrentFrame(CreateFrame()); DesktopFrame* current_frame = queue_.current_frame(); @@ -373,7 +325,10 @@ void ScreenCapturerMac::Capture( if (IsOSLionOrLater()) { // Lion requires us to use their new APIs for doing screen capture. These // APIS currently crash on 10.6.8 if there is no monitor attached. - CgBlitPostLion(*current_frame, region); + if (!CgBlitPostLion(*current_frame, region)) { + callback_->OnCaptureCompleted(NULL); + return; + } } else if (cgl_context_) { flip = true; if (pixel_buffer_object_.get() != 0) { @@ -397,7 +352,7 @@ void ScreenCapturerMac::Capture( // Signal that we are done capturing data from the display framebuffer, // and accessing display structures. - display_configuration_capture_event_->Set(); + desktop_config_monitor_->Unlock(); // Capture the current cursor shape and notify |callback_| if it has changed. CaptureCursor(); @@ -414,6 +369,47 @@ void ScreenCapturerMac::SetMouseShapeObserver( mouse_shape_observer_ = mouse_shape_observer; } +bool ScreenCapturerMac::GetScreenList(ScreenList* screens) { + assert(screens->size() == 0); + if (!IsOSLionOrLater()) { + // Single monitor cast is not supported on pre OS X 10.7. + Screen screen; + screen.id = kFullDesktopScreenId; + screens->push_back(screen); + return true; + } + + for (MacDisplayConfigurations::iterator it = desktop_config_.displays.begin(); + it != desktop_config_.displays.end(); ++it) { + Screen screen; + screen.id = static_cast(it->id); + screens->push_back(screen); + } + return true; +} + +bool ScreenCapturerMac::SelectScreen(ScreenId id) { + if (!IsOSLionOrLater()) { + // Ignore the screen selection on unsupported OS. + assert(!current_display_); + return id == kFullDesktopScreenId; + } + + if (id == kFullDesktopScreenId) { + current_display_ = 0; + } else { + const MacDisplayConfiguration* config = + desktop_config_.FindDisplayConfigurationById( + static_cast(id)); + if (!config) + return false; + current_display_ = config->id; + } + + ScreenConfigurationChanged(); + return true; +} + void ScreenCapturerMac::CaptureCursor() { if (!mouse_shape_observer_) return; @@ -607,7 +603,7 @@ void ScreenCapturerMac::CgBlitPreLion(const DesktopFrame& frame, } } -void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame, +bool ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame, const DesktopRegion& region) { // Copy the entire contents of the previous capture buffer, to capture over. // TODO(wez): Get rid of this as per crbug.com/145064, or implement @@ -618,13 +614,37 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame, frame.stride() * frame.size().height()); } - for (size_t i = 0; i < desktop_config_.displays.size(); ++i) { - const MacDisplayConfiguration& display_config = desktop_config_.displays[i]; + MacDisplayConfigurations displays_to_capture; + if (current_display_) { + // Capturing a single screen. Note that the screen id may change when + // screens are added or removed. + const MacDisplayConfiguration* config = + desktop_config_.FindDisplayConfigurationById(current_display_); + if (config) { + displays_to_capture.push_back(*config); + } else { + LOG(LS_ERROR) << "The selected screen cannot be found for capturing."; + return false; + } + } else { + // Capturing the whole desktop. 
+ displays_to_capture = desktop_config_.displays; + } + for (size_t i = 0; i < displays_to_capture.size(); ++i) { + const MacDisplayConfiguration& display_config = displays_to_capture[i]; + + // Capturing mixed-DPI on one surface is hard, so we only return displays + // that match the "primary" display's DPI. The primary display is always + // the first in the list. + if (i > 0 && display_config.dip_to_pixel_scale != + displays_to_capture[0].dip_to_pixel_scale) { + continue; + } // Determine the display's position relative to the desktop, in pixels. DesktopRect display_bounds = display_config.pixel_bounds; - display_bounds.Translate(-desktop_config_.pixel_bounds.left(), - -desktop_config_.pixel_bounds.top()); + display_bounds.Translate(-screen_pixel_bounds_.left(), + -screen_pixel_bounds_.top()); // Determine which parts of the blit region, if any, lay within the monitor. DesktopRegion copy_region = region; @@ -667,23 +687,28 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame, CFRelease(data); CFRelease(image); } + return true; } void ScreenCapturerMac::ScreenConfigurationChanged() { + if (current_display_) { + const MacDisplayConfiguration* config = + desktop_config_.FindDisplayConfigurationById(current_display_); + screen_pixel_bounds_ = config ? config->pixel_bounds : DesktopRect(); + dip_to_pixel_scale_ = config ? config->dip_to_pixel_scale : 1.0f; + } else { + screen_pixel_bounds_ = desktop_config_.pixel_bounds; + dip_to_pixel_scale_ = desktop_config_.dip_to_pixel_scale; + } + // Release existing buffers, which will be of the wrong size. ReleaseBuffers(); // Clear the dirty region, in case the display is down-sizing. helper_.ClearInvalidRegion(); - // Refresh the cached desktop configuration. - desktop_config_ = MacDesktopConfiguration::GetCurrent( - MacDesktopConfiguration::TopLeftOrigin); - // Re-mark the entire desktop as dirty. - helper_.InvalidateScreen( - DesktopSize(desktop_config_.pixel_bounds.width(), - desktop_config_.pixel_bounds.height())); + helper_.InvalidateScreen(screen_pixel_bounds_.size()); // Make sure the frame buffers will be reallocated. queue_.Reset(); @@ -764,8 +789,8 @@ void ScreenCapturerMac::ScreenConfigurationChanged() { (*cgl_set_full_screen_)(cgl_context_); CGLSetCurrentContext(cgl_context_); - size_t buffer_size = desktop_config_.pixel_bounds.width() * - desktop_config_.pixel_bounds.height() * + size_t buffer_size = screen_pixel_bounds_.width() * + screen_pixel_bounds_.height() * sizeof(uint32_t); pixel_buffer_object_.Init(cgl_context_, buffer_size); } @@ -797,20 +822,17 @@ void ScreenCapturerMac::UnregisterRefreshAndMoveHandlers() { void ScreenCapturerMac::ScreenRefresh(CGRectCount count, const CGRect* rect_array) { - if (desktop_config_.pixel_bounds.is_empty()) + if (screen_pixel_bounds_.is_empty()) return; DesktopRegion region; - + DesktopVector translate_vector = + DesktopVector().subtract(screen_pixel_bounds_.top_left()); for (CGRectCount i = 0; i < count; ++i) { // Convert from Density-Independent Pixel to physical pixel coordinates. - DesktopRect rect = - ScaleAndRoundCGRect(rect_array[i], desktop_config_.dip_to_pixel_scale); - + DesktopRect rect = ScaleAndRoundCGRect(rect_array[i], dip_to_pixel_scale_); // Translate from local desktop to capturer framebuffer coordinates. 
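+    // For example, with screen_pixel_bounds_ anchored at (-1440, 0) and
+    // dip_to_pixel_scale_ of 1.0, a refreshed rect at x == -1440 ends up at
+    // x == 0 in the frame buffer (illustrative numbers).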
- rect.Translate(-desktop_config_.pixel_bounds.left(), - -desktop_config_.pixel_bounds.top()); - + rect.Translate(translate_vector); region.AddRect(rect); } @@ -830,45 +852,12 @@ void ScreenCapturerMac::ScreenUpdateMove(CGScreenUpdateMoveDelta delta, ScreenRefresh(count, refresh_rects); } -void ScreenCapturerMac::DisplaysReconfigured( - CGDirectDisplayID display, - CGDisplayChangeSummaryFlags flags) { - if (flags & kCGDisplayBeginConfigurationFlag) { - if (reconfiguring_displays_.empty()) { - // If this is the first display to start reconfiguring then wait on - // |display_configuration_capture_event_| to block the capture thread - // from accessing display memory until the reconfiguration completes. - if (!display_configuration_capture_event_->Wait( - kDisplayConfigurationEventTimeoutMs)) { - LOG_F(LS_ERROR) << "Event wait timed out."; - abort(); - } - } - - reconfiguring_displays_.insert(display); - } else { - reconfiguring_displays_.erase(display); - - if (reconfiguring_displays_.empty()) { - // If no other displays are reconfiguring then refresh capturer data - // structures and un-block the capturer thread. Occasionally, the - // refresh and move handlers are lost when the screen mode changes, - // so re-register them here (the same does not appear to be true for - // the reconfiguration handler itself). - UnregisterRefreshAndMoveHandlers(); - RegisterRefreshAndMoveHandlers(); - ScreenConfigurationChanged(); - display_configuration_capture_event_->Set(); - } - } -} - void ScreenCapturerMac::ScreenRefreshCallback(CGRectCount count, const CGRect* rect_array, void* user_parameter) { ScreenCapturerMac* capturer = reinterpret_cast(user_parameter); - if (capturer->desktop_config_.pixel_bounds.is_empty()) + if (capturer->screen_pixel_bounds_.is_empty()) capturer->ScreenConfigurationChanged(); capturer->ScreenRefresh(count, rect_array); } @@ -883,20 +872,24 @@ void ScreenCapturerMac::ScreenUpdateMoveCallback( capturer->ScreenUpdateMove(delta, count, rect_array); } -void ScreenCapturerMac::DisplaysReconfiguredCallback( - CGDirectDisplayID display, - CGDisplayChangeSummaryFlags flags, - void* user_parameter) { - ScreenCapturerMac* capturer = - reinterpret_cast(user_parameter); - capturer->DisplaysReconfigured(display, flags); +DesktopFrame* ScreenCapturerMac::CreateFrame() { + scoped_ptr frame( + new BasicDesktopFrame(screen_pixel_bounds_.size())); + + frame->set_dpi(DesktopVector(kStandardDPI * dip_to_pixel_scale_, + kStandardDPI * dip_to_pixel_scale_)); + return frame.release(); } } // namespace // static -ScreenCapturer* ScreenCapturer::Create() { - scoped_ptr capturer(new ScreenCapturerMac()); +ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) { + if (!options.configuration_monitor()) + return NULL; + + scoped_ptr capturer( + new ScreenCapturerMac(options.configuration_monitor())); if (!capturer->Init()) capturer.reset(); return capturer.release(); diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h index 17673b5cc0eb..aa0e808eb69d 100644 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h @@ -26,6 +26,8 @@ class MockScreenCapturer : public ScreenCapturer { MOCK_METHOD1(Capture, void(const DesktopRegion& region)); MOCK_METHOD1(SetMouseShapeObserver, void( MouseShapeObserver* mouse_shape_observer)); + 
+ MOCK_METHOD1(GetScreenList, bool(ScreenList* screens));
+ MOCK_METHOD1(SelectScreen, bool(ScreenId id));

private:
DISALLOW_COPY_AND_ASSIGN(MockScreenCapturer);

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_null.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_null.cc
index a94d6035e492..a0bc7f13ea23 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_null.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_null.cc
@@ -13,20 +13,8 @@
namespace webrtc {

// static
-ScreenCapturer* ScreenCapturer::Create() {
+ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
return NULL;
}

-#if defined(OS_LINUX)
-// static
-ScreenCapturer* ScreenCapturer::CreateWithXDamage(bool use_x_damage) {
- return NULL;
-}
-#elif defined(OS_WIN)
-// static
-ScreenCapturer* ScreenCapturer::CreateWithDisableAero(bool disable_aero) {
- return NULL;
-}
-#endif // defined(OS_WIN)
-
} // namespace webrtc

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_unittest.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
index d5ba213ef8cc..94c1f707e2c0 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
@@ -12,6 +12,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/modules/desktop_capture/screen_capturer_mock_objects.h"
@@ -29,6 +30,11 @@ class ScreenCapturerTest : public testing::Test {
public:
SharedMemory* CreateSharedMemory(size_t size);

+ virtual void SetUp() OVERRIDE {
+ capturer_.reset(
+ ScreenCapturer::Create(DesktopCaptureOptions::CreateDefault()));
+ }
+
protected:
scoped_ptr<ScreenCapturer> capturer_;
MockMouseShapeObserver mouse_observer_;
@@ -53,8 +59,16 @@ SharedMemory* ScreenCapturerTest::CreateSharedMemory(size_t size) {
return new FakeSharedMemory(new char[size], size);
}

+TEST_F(ScreenCapturerTest, GetScreenListAndSelectScreen) {
+ webrtc::ScreenCapturer::ScreenList screens;
+ EXPECT_TRUE(capturer_->GetScreenList(&screens));
+ for (webrtc::ScreenCapturer::ScreenList::iterator it = screens.begin();
+ it != screens.end(); ++it) {
+ EXPECT_TRUE(capturer_->SelectScreen(it->id));
+ }
+}
+
TEST_F(ScreenCapturerTest, StartCapturer) {
- capturer_.reset(ScreenCapturer::Create());
capturer_->SetMouseShapeObserver(&mouse_observer_);
capturer_->Start(&callback_);
}
@@ -71,7 +85,6 @@ TEST_F(ScreenCapturerTest, Capture) {
.Times(AnyNumber())
.WillRepeatedly(Return(static_cast<SharedMemory*>(NULL)));

- capturer_.reset(ScreenCapturer::Create());
capturer_->Start(&callback_);
capturer_->Capture(DesktopRegion());
@@ -106,7 +119,6 @@ TEST_F(ScreenCapturerTest, UseSharedBuffers) {
.Times(AnyNumber())
.WillRepeatedly(Invoke(this, &ScreenCapturerTest::CreateSharedMemory));

- capturer_.reset(ScreenCapturer::Create());
capturer_->Start(&callback_);
capturer_->Capture(DesktopRegion());

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_win.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_win.cc
index 969b6a5a4197..d45e69bb169f 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_win.cc
+++
b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_win.cc @@ -12,10 +12,12 @@ #include +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" #include "webrtc/modules/desktop_capture/desktop_frame.h" #include "webrtc/modules/desktop_capture/desktop_frame_win.h" #include "webrtc/modules/desktop_capture/desktop_region.h" #include "webrtc/modules/desktop_capture/differ.h" +#include "webrtc/modules/desktop_capture/mouse_cursor.h" #include "webrtc/modules/desktop_capture/mouse_cursor_shape.h" #include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h" #include "webrtc/modules/desktop_capture/screen_capturer_helper.h" @@ -43,7 +45,7 @@ const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll"; // ScreenCapturerWin is double-buffered as required by ScreenCapturer. class ScreenCapturerWin : public ScreenCapturer { public: - ScreenCapturerWin(bool disable_aero); + ScreenCapturerWin(const DesktopCaptureOptions& options); virtual ~ScreenCapturerWin(); // Overridden from ScreenCapturer: @@ -51,19 +53,28 @@ class ScreenCapturerWin : public ScreenCapturer { virtual void Capture(const DesktopRegion& region) OVERRIDE; virtual void SetMouseShapeObserver( MouseShapeObserver* mouse_shape_observer) OVERRIDE; + virtual bool GetScreenList(ScreenList* screens) OVERRIDE; + virtual bool SelectScreen(ScreenId id) OVERRIDE; private: // Make sure that the device contexts match the screen configuration. void PrepareCaptureResources(); - // Captures the current screen contents into the current buffer. - void CaptureImage(); + // Captures the current screen contents into the current buffer. Returns true + // if succeeded. + bool CaptureImage(); // Capture the current cursor shape. void CaptureCursor(); + // Get the rect of the currently selected screen. If the screen is disabled + // or disconnected, or any error happens, an empty rect is returned. + DesktopRect GetScreenRect(); + Callback* callback_; MouseShapeObserver* mouse_shape_observer_; + ScreenId current_screen_id_; + std::wstring current_device_key_; // A thread-safe list of invalid rectangles, and the size of the most // recently captured screen. @@ -98,15 +109,16 @@ class ScreenCapturerWin : public ScreenCapturer { DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWin); }; -ScreenCapturerWin::ScreenCapturerWin(bool disable_aero) +ScreenCapturerWin::ScreenCapturerWin(const DesktopCaptureOptions& options) : callback_(NULL), mouse_shape_observer_(NULL), + current_screen_id_(kFullDesktopScreenId), desktop_dc_(NULL), memory_dc_(NULL), dwmapi_library_(NULL), composition_func_(NULL), set_thread_execution_state_failed_(false) { - if (disable_aero) { + if (options.disable_effects()) { // Load dwmapi.dll dynamically since it is not available on XP. if (!dwmapi_library_) dwmapi_library_ = LoadLibrary(kDwmapiLibraryName); @@ -150,11 +162,14 @@ void ScreenCapturerWin::Capture(const DesktopRegion& region) { PrepareCaptureResources(); // Copy screen bits to the current buffer. - CaptureImage(); + if (!CaptureImage()) { + callback_->OnCaptureCompleted(NULL); + return; + } const DesktopFrame* current_frame = queue_.current_frame(); const DesktopFrame* last_frame = queue_.previous_frame(); - if (last_frame) { + if (last_frame && last_frame->size().equals(current_frame->size())) { // Make sure the differencer is set up correctly for these previous and // current screens. 
if (!differ_.get() ||
@@ -202,6 +217,43 @@ void ScreenCapturerWin::SetMouseShapeObserver(
mouse_shape_observer_ = mouse_shape_observer;
}

+bool ScreenCapturerWin::GetScreenList(ScreenList* screens) {
+ assert(screens->size() == 0);
+ BOOL enum_result = TRUE;
+ for (int device_index = 0; ; ++device_index) {
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ enum_result = EnumDisplayDevices(NULL, device_index, &device, 0);
+ // |enum_result| is 0 if we have enumerated all devices.
+ if (!enum_result)
+ break;
+
+ // We only care about active displays.
+ if (!(device.StateFlags & DISPLAY_DEVICE_ACTIVE))
+ continue;
+ Screen screen;
+ screen.id = device_index;
+ screens->push_back(screen);
+ }
+ return true;
+}
+
+bool ScreenCapturerWin::SelectScreen(ScreenId id) {
+ if (id == kFullDesktopScreenId) {
+ current_screen_id_ = id;
+ return true;
+ }
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL enum_result = EnumDisplayDevices(NULL, id, &device, 0);
+ if (!enum_result)
+ return false;
+
+ current_device_key_ = device.DeviceKey;
+ current_screen_id_ = id;
+ return true;
+}
+
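The two methods above are the new per-screen selection API this patch introduces on Windows. As a usage sketch only (not part of the patch): it assumes ScreenList is a vector of Screen, as the unit test earlier in this commit iterates it, and |callback| is any DesktopCapturer::Callback implementation.

// Illustrative only: enumerate screens, select the first, and capture once.
// Error handling elided; CaptureFirstScreen is a hypothetical helper name.
void CaptureFirstScreen(DesktopCapturer::Callback* callback) {
  scoped_ptr<ScreenCapturer> capturer(
      ScreenCapturer::Create(DesktopCaptureOptions::CreateDefault()));
  ScreenCapturer::ScreenList screens;
  if (!capturer.get() || !capturer->GetScreenList(&screens) || screens.empty())
    return;
  capturer->SelectScreen(screens.front().id);  // Or kFullDesktopScreenId.
  capturer->Start(callback);
  capturer->Capture(DesktopRegion());  // Frame is delivered via |callback|.
}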
void ScreenCapturerWin::Start(Callback* callback) {
assert(!callback_);
assert(callback);
@@ -280,17 +332,19 @@ void ScreenCapturerWin::PrepareCaptureResources() {
}
}

-void ScreenCapturerWin::CaptureImage() {
+bool ScreenCapturerWin::CaptureImage() {
+ DesktopRect screen_rect = GetScreenRect();
+ if (screen_rect.is_empty())
+ return false;
+ DesktopSize size = screen_rect.size();
// If the current buffer is from an older generation then allocate a new one.
// Note that we can't reallocate other buffers at this point, since the caller
// may still be reading from them.
- if (!queue_.current_frame()) {
+ if (!queue_.current_frame() ||
+ !queue_.current_frame()->size().equals(size)) {
assert(desktop_dc_ != NULL);
assert(memory_dc_ != NULL);

- DesktopSize size = DesktopSize(
- desktop_dc_rect_.width(), desktop_dc_rect_.height());
-
size_t buffer_size = size.width() * size.height() *
DesktopFrame::kBytesPerPixel;
SharedMemory* shared_memory =
@@ -307,15 +361,16 @@
HGDIOBJ previous_object = SelectObject(memory_dc_, current->bitmap());
if (previous_object != NULL) {
BitBlt(memory_dc_,
- 0, 0, desktop_dc_rect_.width(), desktop_dc_rect_.height(),
+ 0, 0, screen_rect.width(), screen_rect.height(),
desktop_dc_,
- desktop_dc_rect_.left(), desktop_dc_rect_.top(),
+ screen_rect.left(), screen_rect.top(),
SRCCOPY | CAPTUREBLT);

// Select back the previously selected object so that the device context
// could be destroyed independently of the bitmap if needed.
SelectObject(memory_dc_, previous_object);
}
+ return true;
}

void ScreenCapturerWin::CaptureCursor() {
@@ -327,11 +382,22 @@
}

// Note that |cursor_info.hCursor| does not need to be freed.
- scoped_ptr<MouseCursorShape> cursor(
- CreateMouseCursorShapeFromCursor(desktop_dc_, cursor_info.hCursor));
- if (!cursor.get())
+ scoped_ptr<MouseCursor> cursor_image(
+ CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
+ if (!cursor_image.get())
return;

+ scoped_ptr<MouseCursorShape> cursor(new MouseCursorShape);
+ cursor->hotspot = cursor_image->hotspot();
+ cursor->size = cursor_image->image()->size();
+ uint8_t* current_row = cursor_image->image()->data();
+ for (int y = 0; y < cursor_image->image()->size().height(); ++y) {
+ cursor->data.append(current_row,
+ current_row + cursor_image->image()->size().width() *
+ DesktopFrame::kBytesPerPixel);
+ current_row += cursor_image->image()->stride();
+ }
+
// Compare the current cursor with the last one we sent to the client. If
// they're the same, then don't bother sending the cursor again.
if (last_cursor_.size.equals(cursor->size) &&
@@ -350,16 +416,43 @@
mouse_shape_observer_->OnCursorShapeChanged(cursor.release());
}

+DesktopRect ScreenCapturerWin::GetScreenRect() {
+ DesktopRect rect = desktop_dc_rect_;
+ if (current_screen_id_ == kFullDesktopScreenId)
+ return rect;
+
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL result = EnumDisplayDevices(NULL, current_screen_id_, &device, 0);
+ if (!result)
+ return DesktopRect();
+
+ // Verifies the device index still maps to the same display device. DeviceKey
+ // is documented as reserved, but it actually contains the registry key for
+ // the device and is unique for each monitor, while DeviceID is not.
+ if (current_device_key_ != device.DeviceKey)
+ return DesktopRect();
+
+ DEVMODE device_mode;
+ device_mode.dmSize = sizeof(device_mode);
+ device_mode.dmDriverExtra = 0;
+ result = EnumDisplaySettingsEx(
+ device.DeviceName, ENUM_CURRENT_SETTINGS, &device_mode, 0);
+ if (!result)
+ return DesktopRect();
+
+ rect = DesktopRect::MakeXYWH(
+ rect.left() + device_mode.dmPosition.x,
+ rect.top() + device_mode.dmPosition.y,
+ device_mode.dmPelsWidth,
+ device_mode.dmPelsHeight);
+ return rect;
+}
} // namespace

// static
-ScreenCapturer* ScreenCapturer::Create() {
- return CreateWithDisableAero(true);
-}
-
-// static
-ScreenCapturer* ScreenCapturer::CreateWithDisableAero(bool disable_aero) {
- return new ScreenCapturerWin(disable_aero);
+ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
+ return new ScreenCapturerWin(options);
}

} // namespace webrtc

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_x11.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_x11.cc
index 5cca65b133b5..4d07d98c6283 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_x11.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_x11.cc
@@ -18,6 +18,7 @@
#include
#include

+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/differ.h"
#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
@@ -40,13 +41,14 @@ namespace webrtc {
namespace {

// A class to perform video frame capturing for Linux.
-class ScreenCapturerLinux : public ScreenCapturer {
+class ScreenCapturerLinux : public ScreenCapturer,
+ public SharedXDisplay::XEventHandler {
public:
ScreenCapturerLinux();
virtual ~ScreenCapturerLinux();

// TODO(ajwong): Do we really want this to be synchronous?
- bool Init(bool use_x_damage); + bool Init(const DesktopCaptureOptions& options); // DesktopCapturer interface. virtual void Start(Callback* delegate) OVERRIDE; @@ -55,23 +57,23 @@ class ScreenCapturerLinux : public ScreenCapturer { // ScreenCapturer interface. virtual void SetMouseShapeObserver( MouseShapeObserver* mouse_shape_observer) OVERRIDE; + virtual bool GetScreenList(ScreenList* screens) OVERRIDE; + virtual bool SelectScreen(ScreenId id) OVERRIDE; private: - void InitXDamage(); + Display* display() { return options_.x_display()->display(); } - // Read and handle all currently-pending XEvents. - // In the DAMAGE case, process the XDamage events and store the resulting - // damage rectangles in the ScreenCapturerHelper. - // In all cases, call ScreenConfigurationChanged() in response to any - // ConfigNotify events. - void ProcessPendingXEvents(); + // SharedXDisplay::XEventHandler interface. + virtual bool HandleXEvent(const XEvent& event) OVERRIDE; + + void InitXDamage(); // Capture the cursor image and notify the delegate if it was captured. void CaptureCursor(); // Capture screen pixels to the current buffer in the queue. In the DAMAGE // case, the ScreenCapturerHelper already holds the list of invalid rectangles - // from ProcessPendingXEvents(). In the non-DAMAGE case, this captures the + // from HandleXEvent(). In the non-DAMAGE case, this captures the // whole screen, then calculates some invalid rectangles that include any // differences between this and the previous capture. DesktopFrame* CaptureScreen(); @@ -88,11 +90,12 @@ class ScreenCapturerLinux : public ScreenCapturer { void DeinitXlib(); + DesktopCaptureOptions options_; + Callback* callback_; MouseShapeObserver* mouse_shape_observer_; // X11 graphics context. - Display* display_; GC gc_; Window root_window_; @@ -131,7 +134,6 @@ class ScreenCapturerLinux : public ScreenCapturer { ScreenCapturerLinux::ScreenCapturerLinux() : callback_(NULL), mouse_shape_observer_(NULL), - display_(NULL), gc_(NULL), root_window_(BadValue), has_xfixes_(false), @@ -146,35 +148,40 @@ ScreenCapturerLinux::ScreenCapturerLinux() } ScreenCapturerLinux::~ScreenCapturerLinux() { + options_.x_display()->RemoveEventHandler(ConfigureNotify, this); + if (use_damage_) { + options_.x_display()->RemoveEventHandler( + damage_event_base_ + XDamageNotify, this); + } + if (has_xfixes_) { + options_.x_display()->RemoveEventHandler( + xfixes_event_base_ + XFixesCursorNotify, this); + } DeinitXlib(); } -bool ScreenCapturerLinux::Init(bool use_x_damage) { - // TODO(ajwong): We should specify the display string we are attaching to - // in the constructor. - display_ = XOpenDisplay(NULL); - if (!display_) { - LOG(LS_ERROR) << "Unable to open display"; - return false; - } +bool ScreenCapturerLinux::Init(const DesktopCaptureOptions& options) { + options_ = options; - root_window_ = RootWindow(display_, DefaultScreen(display_)); + root_window_ = RootWindow(display(), DefaultScreen(display())); if (root_window_ == BadValue) { LOG(LS_ERROR) << "Unable to get the root window"; DeinitXlib(); return false; } - gc_ = XCreateGC(display_, root_window_, 0, NULL); + gc_ = XCreateGC(display(), root_window_, 0, NULL); if (gc_ == NULL) { LOG(LS_ERROR) << "Unable to get graphics context"; DeinitXlib(); return false; } + options_.x_display()->AddEventHandler(ConfigureNotify, this); + // Check for XFixes extension. This is required for cursor shape // notifications, and for our use of XDamage. 
- if (XFixesQueryExtension(display_, &xfixes_event_base_, + if (XFixesQueryExtension(display(), &xfixes_event_base_, &xfixes_error_base_)) { has_xfixes_ = true; } else { @@ -182,20 +189,22 @@ bool ScreenCapturerLinux::Init(bool use_x_damage) { } // Register for changes to the dimensions of the root window. - XSelectInput(display_, root_window_, StructureNotifyMask); + XSelectInput(display(), root_window_, StructureNotifyMask); - if (!x_server_pixel_buffer_.Init(display_, DefaultRootWindow(display_))) { + if (!x_server_pixel_buffer_.Init(display(), DefaultRootWindow(display()))) { LOG(LS_ERROR) << "Failed to initialize pixel buffer."; return false; } if (has_xfixes_) { // Register for changes to the cursor shape. - XFixesSelectCursorInput(display_, root_window_, + XFixesSelectCursorInput(display(), root_window_, XFixesDisplayCursorNotifyMask); + options_.x_display()->AddEventHandler( + xfixes_event_base_ + XFixesCursorNotify, this); } - if (use_x_damage) { + if (options_.use_update_notifications()) { InitXDamage(); } @@ -209,7 +218,7 @@ void ScreenCapturerLinux::InitXDamage() { } // Check for XDamage extension. - if (!XDamageQueryExtension(display_, &damage_event_base_, + if (!XDamageQueryExtension(display(), &damage_event_base_, &damage_error_base_)) { LOG(LS_INFO) << "X server does not support XDamage."; return; @@ -221,7 +230,7 @@ void ScreenCapturerLinux::InitXDamage() { // properly. // Request notifications every time the screen becomes damaged. - damage_handle_ = XDamageCreate(display_, root_window_, + damage_handle_ = XDamageCreate(display(), root_window_, XDamageReportNonEmpty); if (!damage_handle_) { LOG(LS_ERROR) << "Unable to initialize XDamage."; @@ -229,13 +238,16 @@ void ScreenCapturerLinux::InitXDamage() { } // Create an XFixes server-side region to collate damage into. - damage_region_ = XFixesCreateRegion(display_, 0, 0); + damage_region_ = XFixesCreateRegion(display(), 0, 0); if (!damage_region_) { - XDamageDestroy(display_, damage_handle_); + XDamageDestroy(display(), damage_handle_); LOG(LS_ERROR) << "Unable to create XFixes region."; return; } + options_.x_display()->AddEventHandler( + damage_event_base_ + XDamageNotify, this); + use_damage_ = true; LOG(LS_INFO) << "Using XDamage extension."; } @@ -253,7 +265,7 @@ void ScreenCapturerLinux::Capture(const DesktopRegion& region) { queue_.MoveToNextFrame(); // Process XEvents for XDamage and cursor shape tracking. - ProcessPendingXEvents(); + options_.x_display()->ProcessPendingXEvents(); // ProcessPendingXEvents() may call ScreenConfigurationChanged() which // reinitializes |x_server_pixel_buffer_|. Check if the pixel buffer is still @@ -300,36 +312,50 @@ void ScreenCapturerLinux::SetMouseShapeObserver( mouse_shape_observer_ = mouse_shape_observer; } -void ScreenCapturerLinux::ProcessPendingXEvents() { - // Find the number of events that are outstanding "now." We don't just loop - // on XPending because we want to guarantee this terminates. - int events_to_process = XPending(display_); - XEvent e; +bool ScreenCapturerLinux::GetScreenList(ScreenList* screens) { + DCHECK(screens->size() == 0); + // TODO(jiayl): implement screen enumeration. 
+ Screen default_screen;
+ default_screen.id = 0;
+ screens->push_back(default_screen);
+ return true;
+}

- for (int i = 0; i < events_to_process; i++) {
- XNextEvent(display_, &e);
- if (use_damage_ && (e.type == damage_event_base_ + XDamageNotify)) {
- XDamageNotifyEvent* event = reinterpret_cast<XDamageNotifyEvent*>(&e);
- DCHECK(event->level == XDamageReportNonEmpty);
- } else if (e.type == ConfigureNotify) {
- ScreenConfigurationChanged();
- } else if (has_xfixes_ &&
- e.type == xfixes_event_base_ + XFixesCursorNotify) {
- XFixesCursorNotifyEvent* cne;
- cne = reinterpret_cast<XFixesCursorNotifyEvent*>(&e);
- if (cne->subtype == XFixesDisplayCursorNotify) {
- CaptureCursor();
- }
- } else {
- LOG(LS_WARNING) << "Got unknown event type: " << e.type;
+bool ScreenCapturerLinux::SelectScreen(ScreenId id) {
+ // TODO(jiayl): implement screen selection.
+ return true;
+}
+
+bool ScreenCapturerLinux::HandleXEvent(const XEvent& event) {
+ if (use_damage_ && (event.type == damage_event_base_ + XDamageNotify)) {
+ const XDamageNotifyEvent* damage_event =
+ reinterpret_cast<const XDamageNotifyEvent*>(&event);
+ if (damage_event->damage != damage_handle_)
+ return false;
+ DCHECK(damage_event->level == XDamageReportNonEmpty);
+ return true;
+ } else if (event.type == ConfigureNotify) {
+ ScreenConfigurationChanged();
+ return true;
+ } else if (has_xfixes_ &&
+ event.type == xfixes_event_base_ + XFixesCursorNotify) {
+ const XFixesCursorNotifyEvent* cursor_event =
+ reinterpret_cast<const XFixesCursorNotifyEvent*>(&event);
+ if (cursor_event->window == root_window_ &&
+ cursor_event->subtype == XFixesDisplayCursorNotify) {
+ CaptureCursor();
}
+ // Always return false for cursor notifications, because there might be
+ // other listeners for these for the same window.
+ return false;
}
+ return false;
}

void ScreenCapturerLinux::CaptureCursor() {
DCHECK(has_xfixes_);

- XFixesCursorImage* img = XFixesGetCursorImage(display_);
+ XFixesCursorImage* img = XFixesGetCursorImage(display());
if (!img) {
return;
}
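HandleXEvent() above replaces the old ProcessPendingXEvents() loop: each capturer now registers per-event-type handlers on the shared display connection. A minimal standalone handler, sketched against the SharedXDisplay::XEventHandler interface added later in this commit (ConfigureLogger is an illustrative name, not part of the patch):

// Logs root-window resizes; XEvent/xconfigure are plain Xlib types.
class ConfigureLogger : public SharedXDisplay::XEventHandler {
 public:
  virtual ~ConfigureLogger() {}
  // Return true to consume the event; later handlers for this type are skipped.
  virtual bool HandleXEvent(const XEvent& event) {
    if (event.type != ConfigureNotify)
      return false;
    LOG(LS_INFO) << "Window resized to " << event.xconfigure.width << "x"
                 << event.xconfigure.height;
    return true;
  }
};
// Registered the same way the capturer does above:
//   options.x_display()->AddEventHandler(ConfigureNotify, &logger);
// and invoked from options.x_display()->ProcessPendingXEvents().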
@@ -375,10 +401,10 @@ DesktopFrame* ScreenCapturerLinux::CaptureScreen() {
x_server_pixel_buffer_.Synchronize();
if (use_damage_ && queue_.previous_frame()) {
// Atomically fetch and clear the damage region.
- XDamageSubtract(display_, damage_handle_, None, damage_region_);
+ XDamageSubtract(display(), damage_handle_, None, damage_region_);
int rects_num = 0;
XRectangle bounds;
- XRectangle* rects = XFixesFetchRegionAndBounds(display_, damage_region_,
+ XRectangle* rects = XFixesFetchRegionAndBounds(display(), damage_region_,
&rects_num, &bounds);
for (int i = 0; i < rects_num; ++i) {
updated_region->AddRect(DesktopRect::MakeXYWH(
@@ -430,7 +456,7 @@ void ScreenCapturerLinux::ScreenConfigurationChanged() {
queue_.Reset();
helper_.ClearInvalidRegion();

- if (!x_server_pixel_buffer_.Init(display_, DefaultRootWindow(display_))) {
+ if (!x_server_pixel_buffer_.Init(display(), DefaultRootWindow(display()))) {
LOG(LS_ERROR) << "Failed to initialize pixel buffer after screen "
"configuration change.";
}
@@ -452,51 +478,40 @@ void ScreenCapturerLinux::SynchronizeFrame() {
DCHECK(current != last);
for (DesktopRegion::Iterator it(last_invalid_region_);
!it.IsAtEnd(); it.Advance()) {
- const DesktopRect& r = it.rect();
- int offset = r.top() * current->stride() +
- r.left() * DesktopFrame::kBytesPerPixel;
- for (int i = 0; i < r.height(); ++i) {
- memcpy(current->data() + offset, last->data() + offset,
- r.width() * DesktopFrame::kBytesPerPixel);
- offset += current->size().width() * DesktopFrame::kBytesPerPixel;
- }
+ current->CopyPixelsFrom(*last, it.rect().top_left(), it.rect());
}
}

void ScreenCapturerLinux::DeinitXlib() {
if (gc_) {
- XFreeGC(display_, gc_);
+ XFreeGC(display(), gc_);
gc_ = NULL;
}

x_server_pixel_buffer_.Release();

- if (display_) {
- if (damage_handle_)
- XDamageDestroy(display_, damage_handle_);
- if (damage_region_)
- XFixesDestroyRegion(display_, damage_region_);
- XCloseDisplay(display_);
- display_ = NULL;
- damage_handle_ = 0;
- damage_region_ = 0;
+ if (display()) {
+ if (damage_handle_) {
+ XDamageDestroy(display(), damage_handle_);
+ damage_handle_ = 0;
+ }
+
+ if (damage_region_) {
+ XFixesDestroyRegion(display(), damage_region_);
+ damage_region_ = 0;
+ }
}
}

} // namespace

// static
-ScreenCapturer* ScreenCapturer::Create() {
- scoped_ptr<ScreenCapturerLinux> capturer(new ScreenCapturerLinux());
- if (!capturer->Init(false))
- capturer.reset();
- return capturer.release();
-}
+ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
+ if (!options.x_display())
+ return NULL;

-// static
-ScreenCapturer* ScreenCapturer::CreateWithXDamage(bool use_x_damage) {
scoped_ptr<ScreenCapturerLinux> capturer(new ScreenCapturerLinux());
- if (!capturer->Init(use_x_damage))
+ if (!capturer->Init(options))
capturer.reset();
return capturer.release();
}

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.cc
index 76eed7742356..11bb2dbb6d0f 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.cc
@@ -15,6 +15,7 @@
#include "webrtc/modules/desktop_capture/win/scoped_gdi_object.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -77,7 +78,8 @@ void AddCursorOutline(int width, int height, uint32_t* data) {

// Premultiplies RGB components of the pixel data in the given image by
// the corresponding alpha components.
void AlphaMul(uint32_t* data, int width, int height) {
- COMPILE_ASSERT(sizeof(uint32_t) == kBytesPerPixel);
+ COMPILE_ASSERT(sizeof(uint32_t) == kBytesPerPixel,
+ size_of_uint32_should_be_the_bytes_per_pixel);

for (uint32_t* data_end = data + width * height; data != data_end; ++data) {
RGBQUAD* from = reinterpret_cast<RGBQUAD*>(data);
@@ -92,29 +94,24 @@ void AlphaMul(uint32_t* data, int width, int height) {
}

// Scans a 32bpp bitmap looking for any pixels with non-zero alpha component.
-// |*has_alpha| is set to true if non-zero alpha is found. |stride| is expressed
-// in pixels.
-bool HasAlphaChannel(const uint32_t* data, int stride, int width, int height,
- bool* has_alpha) {
+// Returns true if non-zero alpha is found. |stride| is expressed in pixels.
+bool HasAlphaChannel(const uint32_t* data, int stride, int width, int height) {
const RGBQUAD* plane = reinterpret_cast<const RGBQUAD*>(data);
for (int y = 0; y < height; ++y) {
for (int x = 0; x < width; ++x) {
- if (plane->rgbReserved != 0) {
- *has_alpha = true;
+ if (plane->rgbReserved != 0)
return true;
- }
plane += 1;
}
plane += stride - width;
}

- *has_alpha = false;
- return true;
+ return false;
}

} // namespace

-MouseCursorShape* CreateMouseCursorShapeFromCursor(HDC dc, HCURSOR cursor) {
+MouseCursor* CreateMouseCursorFromHCursor(HDC dc, HCURSOR cursor) {
ICONINFO iinfo;
if (!GetIconInfo(cursor, &iinfo)) {
LOG_F(LS_ERROR) << "Unable to get cursor icon info. Error = "
@@ -167,20 +164,18 @@ MouseCursorShape* CreateMouseCursorShapeFromCursor(HDC dc, HCURSOR cursor) {
}

uint32_t* mask_plane = mask_data.get();
-
- scoped_array<uint32_t> color_data;
- uint32_t* color_plane = NULL;
- int color_stride = 0;
+ scoped_ptr<DesktopFrame> image(
+ new BasicDesktopFrame(DesktopSize(width, height)));
bool has_alpha = false;

if (is_color) {
+ image.reset(new BasicDesktopFrame(DesktopSize(width, height)));
// Get the pixels from the color bitmap.
- color_data.reset(new uint32_t[width * height]);
if (!GetDIBits(dc,
scoped_color,
0,
height,
- color_data.get(),
+ image->data(),
reinterpret_cast<BITMAPINFO*>(&bmi),
DIB_RGB_COLORS)) {
LOG_F(LS_ERROR) << "Unable to get bitmap bits. Error = "
@@ -188,30 +183,28 @@ MouseCursorShape* CreateMouseCursorShapeFromCursor(HDC dc, HCURSOR cursor) {
return NULL;
}

- color_plane = color_data.get();
- color_stride = width;
-
// GetDIBits() does not provide any indication whether the bitmap has alpha
// channel, so we use HasAlphaChannel() below to find it out.
- if (!HasAlphaChannel(color_plane, color_stride, width, height, &has_alpha))
- return NULL;
+ has_alpha = HasAlphaChannel(reinterpret_cast<uint32_t*>(image->data()),
+ width, width, height);
} else {
// For non-color cursors, the mask contains both an AND and an XOR mask and
// the height includes both. Thus, the width is correct, but we need to
// divide by 2 to get the correct mask height.
height /= 2;

+ image.reset(new BasicDesktopFrame(DesktopSize(width, height)));
+
// The XOR mask becomes the color bitmap.
- color_plane = mask_plane + (width * height);
- color_stride = width;
+ memcpy(
+ image->data(), mask_plane + (width * height), image->stride() * width);
}
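For reference, the reconstruction loop below decodes classic Win32 monochrome-cursor semantics (AND mask selects transparency, XOR mask supplies bits). A per-pixel sketch of the same mapping, with a hypothetical helper name and illustrative ARGB constants standing in for kPixelRgbaBlack and friends:

// Illustrative only. |and_bit|/|xor_bit| are the widened 32bpp mask pixels.
// AND=0          -> opaque: black XOR the xor-bits (0 -> black, 1 -> white).
// AND=1, XOR=0   -> screen shows through: fully transparent.
// AND=1, XOR=1   -> "invert screen", not expressible in ARGB; the loop below
//                   substitutes opaque black and adds an outline instead.
uint32_t DecodeMonochromePixel(bool and_bit, bool xor_bit) {
  if (!and_bit)
    return 0xFF000000u ^ (xor_bit ? 0x00FFFFFFu : 0u);  // Opaque black/white.
  return xor_bit ? 0xFF000000u : 0x00000000u;  // Black (outlined) : transparent.
}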
// Reconstruct transparency from the mask if the color image does not have
// alpha channel.
if (!has_alpha) {
bool add_outline = false;
- uint32_t* color = color_plane;
- uint32_t* dst = color_plane;
+ uint32_t* dst = reinterpret_cast<uint32_t*>(image->data());
uint32_t* mask = mask_plane;
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
// with black. In this case, we also add an outline around the cursor
// so that it is visible against a dark background.
if (*mask == kPixelRgbWhite) {
- if (*color != 0) {
+ if (*dst != 0) {
add_outline = true;
*dst = kPixelRgbaBlack;
} else {
*dst = kPixelRgbaTransparent;
}
} else {
- *dst = kPixelRgbaBlack ^ *color;
+ *dst = kPixelRgbaBlack ^ *dst;
}
- ++color;
++dst;
++mask;
}
}

if (add_outline) {
- AddCursorOutline(width, height, color_plane);
+ AddCursorOutline(
+ width, height, reinterpret_cast<uint32_t*>(image->data()));
}
}

- // Pre-multiply the resulting pixels since MouseCursorShape uses premultiplied
+ // Pre-multiply the resulting pixels since MouseCursor uses premultiplied
// images.
- AlphaMul(color_plane, width, height);
+ AlphaMul(reinterpret_cast<uint32_t*>(image->data()), width, height);

- scoped_ptr<MouseCursorShape> result(new MouseCursorShape());
- result->data.assign(reinterpret_cast<char*>(color_plane),
- height * width * kBytesPerPixel);
- result->size.set(width, height);
- result->hotspot.set(hotspot_x, hotspot_y);
- return result.release();
+ return new MouseCursor(
+ image.release(), DesktopVector(hotspot_x, hotspot_y));
}

} // namespace webrtc

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.h
index 08a6c4a0eff0..d521cc08194e 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.h
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor.h
@@ -13,13 +13,12 @@

#include

-#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
-
namespace webrtc {

-// Converts a cursor into a |MouseCursorShape| instance.
-MouseCursorShape* CreateMouseCursorShapeFromCursor(
- HDC dc, HCURSOR cursor);
+class MouseCursor;
+
+// Converts an HCURSOR into a |MouseCursor| instance.
+MouseCursor* CreateMouseCursorFromHCursor(HDC dc, HCURSOR cursor);

} // namespace webrtc
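With the header above, converting a live cursor takes one call. A hedged usage sketch (DumpArrowCursorSize is a hypothetical name; GetDC, LoadCursor, and IDC_ARROW are standard Win32; image() and hotspot() are used the same way by the capturer earlier in this patch):

// Illustrative only: convert the stock arrow cursor and log its size.
void DumpArrowCursorSize() {
  HDC dc = GetDC(NULL);
  scoped_ptr<MouseCursor> cursor(
      CreateMouseCursorFromHCursor(dc, LoadCursor(NULL, IDC_ARROW)));
  ReleaseDC(NULL, dc);
  if (cursor.get()) {
    // image() is a premultiplied 32bpp DesktopFrame; hotspot() is in pixels.
    LOG(LS_INFO) << "Cursor is " << cursor->image()->size().width() << "x"
                 << cursor->image()->size().height();
  }
}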
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor_unittest.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor_unittest.cc
index c1c741736610..9d2387483d6b 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/win/cursor_unittest.cc
@@ -9,7 +9,9 @@
*/

#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/modules/desktop_capture/win/cursor.h"
#include "webrtc/modules/desktop_capture/win/cursor_unittest_resources.h"
#include "webrtc/modules/desktop_capture/win/scoped_gdi_object.h"
@@ -19,9 +21,9 @@ namespace webrtc {

namespace {

-// Loads |left| from resources, converts it to a |MouseCursorShape| instance
-// and compares pixels with |right|. Returns true if MouseCursorShape bits
-// match |right|. |right| must be a 32bpp cursor with alpha channel.
+// Loads |left| from resources, converts it to a |MouseCursor| instance and
+// compares pixels with |right|. Returns true if MouseCursor bits match |right|.
+// |right| must be a 32bpp cursor with alpha channel.
bool ConvertToMouseShapeAndCompare(unsigned left, unsigned right) {
HMODULE instance = GetModuleHandle(NULL);

// Convert |cursor| to |mouse_shape|.
HDC dc = GetDC(NULL);
- scoped_ptr<MouseCursorShape> mouse_shape(
- CreateMouseCursorShapeFromCursor(dc, cursor));
+ scoped_ptr<MouseCursor> mouse_shape(
+ CreateMouseCursorFromHCursor(dc, cursor));
ReleaseDC(NULL, dc);

EXPECT_TRUE(mouse_shape.get());
@@ -56,7 +58,7 @@ bool ConvertToMouseShapeAndCompare(unsigned left, unsigned right) {
int width = bitmap_info.bmWidth;
int height = bitmap_info.bmHeight;
- EXPECT_TRUE(DesktopSize(width, height).equals(mouse_shape->size));
+ EXPECT_TRUE(DesktopSize(width, height).equals(mouse_shape->image()->size()));

// Get the pixels from |scoped_color|.
int size = width * height;
EXPECT_TRUE(GetBitmapBits(scoped_color, size * sizeof(uint32_t), data.get()));

// Compare the 32bpp image in |mouse_shape| with the one loaded from |right|.
- return memcmp(data.get(), mouse_shape->data.data(),
+ return memcmp(data.get(), mouse_shape->image()->data(),
size * sizeof(uint32_t)) == 0;
}

} // namespace

-TEST(MouseCursorShapeTest, MatchCursors) {
+TEST(MouseCursorTest, MatchCursors) {
EXPECT_TRUE(ConvertToMouseShapeAndCompare(IDD_CURSOR1_24BPP,
IDD_CURSOR1_32BPP));

diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/run_loop.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.cc
similarity index 65%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/run_loop.cc
rename to media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.cc
index 730071d4c40c..c5176d5e6050 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/common/run_loop.cc
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.cc
@@ -1,4 +1,4 @@
-/*
+ /*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
@@ -7,16 +7,16 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/video_engine/test/common/run_loop.h"

-#include
+#include "webrtc/modules/desktop_capture/window_capturer.h"
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"

namespace webrtc {
-namespace test {

-void PressEnterToContinue() {
- puts(">> Press ENTER to continue...");
- while (getchar() != '\n' && !feof(stdin));
+// static
+WindowCapturer* WindowCapturer::Create() {
+ return Create(DesktopCaptureOptions::CreateDefault());
}

-} // namespace test
+
} // namespace webrtc

diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.h
index 8cc57c14d583..478c8ee99c3d 100644
--- a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.h
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer.h
@@ -14,15 +14,18 @@

#include
#include

+#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"

namespace webrtc {

+class DesktopCaptureOptions;
+
class WindowCapturer : public DesktopCapturer {
public:
- typedef intptr_t WindowId;
+ typedef webrtc::WindowId WindowId;

struct Window {
WindowId id;
@@ -33,6 +36,9 @@ class WindowCapturer : public DesktopCapturer {

typedef std::vector<Window> WindowList;

+ static WindowCapturer* Create(const DesktopCaptureOptions& options);
+
+ // TODO(sergeyu): Remove this method.
crbug.com/172183 static WindowCapturer* Create(); virtual ~WindowCapturer() {} diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_mac.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_mac.cc index e78d95bc2e93..6268fc011564 100755 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_mac.cc +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_mac.cc @@ -41,29 +41,6 @@ bool CFStringRefToUtf8(const CFStringRef string, std::string* str_utf8) { return true; } -// DesktopFrame that stores data in CFData. -class CFDataDesktopFrame : public DesktopFrame { - public: - // Consumes |cf_data| reference. - // - // TODO(sergeyu): Here we const_cast<> the buffer used in CFDataRef. CFDataRef - // buffer is immutable, but DesktopFrame is always mutable. This shouldn't be - // a problem because frames generated by WindowCapturers are normally not - // mutated. To avoid this hack consider making DesktopFrame immutable and add - // MutableDesktopFrame. - CFDataDesktopFrame(DesktopSize size, int stride, CFDataRef cf_data) - : DesktopFrame(size, stride, - const_cast(CFDataGetBytePtr(cf_data)), NULL), - cf_data_(cf_data) { - } - virtual ~CFDataDesktopFrame() { - CFRelease(cf_data_); - } - - private: - CFDataRef cf_data_; -}; - class WindowCapturerMac : public WindowCapturer { public: WindowCapturerMac(); @@ -185,9 +162,18 @@ void WindowCapturerMac::Capture(const DesktopRegion& region) { int width = CGImageGetWidth(window_image); int height = CGImageGetHeight(window_image); CGDataProviderRef provider = CGImageGetDataProvider(window_image); - DesktopFrame* frame = new CFDataDesktopFrame( - DesktopSize(width, height), CGImageGetBytesPerRow(window_image), - CGDataProviderCopyData(provider)); + CFDataRef cf_data = CGDataProviderCopyData(provider); + DesktopFrame* frame = new BasicDesktopFrame( + DesktopSize(width, height)); + + int src_stride = CGImageGetBytesPerRow(window_image); + const uint8_t* src_data = CFDataGetBytePtr(cf_data); + for (int y = 0; y < height; ++y) { + memcpy(frame->data() + frame->stride() * y, src_data + src_stride * y, + DesktopFrame::kBytesPerPixel * width); + } + + CFRelease(cf_data); CFRelease(window_image); callback_->OnCaptureCompleted(frame); @@ -196,7 +182,7 @@ void WindowCapturerMac::Capture(const DesktopRegion& region) { } // namespace // static -WindowCapturer* WindowCapturer::Create() { +WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) { return new WindowCapturerMac(); } diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_null.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_null.cc index 8ea723aaa6e7..7bb1247ea9df 100755 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_null.cc +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_null.cc @@ -69,7 +69,7 @@ void WindowCapturerNull::Capture(const DesktopRegion& region) { } // namespace // static -WindowCapturer* WindowCapturer::Create() { +WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) { return new WindowCapturerNull(); } diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_unittest.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_unittest.cc index c6de16ac420f..ad60a4bd1790 100644 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_unittest.cc @@ -10,9 
+10,8 @@ #include "webrtc/modules/desktop_capture/window_capturer.h" -#include - #include "gtest/gtest.h" +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" #include "webrtc/modules/desktop_capture/desktop_frame.h" #include "webrtc/modules/desktop_capture/desktop_region.h" #include "webrtc/system_wrappers/interface/logging.h" @@ -24,7 +23,8 @@ class WindowCapturerTest : public testing::Test, public DesktopCapturer::Callback { public: void SetUp() OVERRIDE { - capturer_.reset(WindowCapturer::Create()); + capturer_.reset( + WindowCapturer::Create(DesktopCaptureOptions::CreateDefault())); } void TearDown() OVERRIDE { diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_win.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_win.cc index da93f5fce02b..d65c625ea28a 100644 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_win.cc +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_win.cc @@ -102,19 +102,19 @@ class WindowCapturerWin : public WindowCapturer { // HWND and HDC for the currently selected window or NULL if window is not // selected. HWND window_; - HDC window_dc_; // dwmapi.dll is used to determine if desktop compositing is enabled. HMODULE dwmapi_library_; DwmIsCompositionEnabledFunc is_composition_enabled_func_; + DesktopSize previous_size_; + DISALLOW_COPY_AND_ASSIGN(WindowCapturerWin); }; WindowCapturerWin::WindowCapturerWin() : callback_(NULL), - window_(NULL), - window_dc_(NULL) { + window_(NULL) { // Try to load dwmapi.dll dynamically since it is not available on XP. dwmapi_library_ = LoadLibrary(L"dwmapi.dll"); if (dwmapi_library_) { @@ -149,17 +149,11 @@ bool WindowCapturerWin::GetWindowList(WindowList* windows) { } bool WindowCapturerWin::SelectWindow(WindowId id) { - if (window_dc_) - ReleaseDC(window_, window_dc_); - - window_ = reinterpret_cast(id); - window_dc_ = GetWindowDC(window_); - if (!window_dc_) { - LOG(LS_WARNING) << "Failed to select window: " << GetLastError(); - window_ = NULL; + HWND window = reinterpret_cast(id); + if (!IsWindow(window) || !IsWindowVisible(window) || IsIconic(window)) return false; - } - + window_ = window; + previous_size_.set(0, 0); return true; } @@ -171,13 +165,17 @@ void WindowCapturerWin::Start(Callback* callback) { } void WindowCapturerWin::Capture(const DesktopRegion& region) { - if (!window_dc_) { + if (!window_) { LOG(LS_ERROR) << "Window hasn't been selected: " << GetLastError(); callback_->OnCaptureCompleted(NULL); return; } - assert(window_); + // Stop capturing if the window has been minimized or hidden. 
+ if (IsIconic(window_) || !IsWindowVisible(window_)) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }

RECT rect;
if (!GetWindowRect(window_, &rect)) {
@@ -186,16 +184,24 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
return;
}

- scoped_ptr<DesktopFrameWin> frame(DesktopFrameWin::Create(
- DesktopSize(rect.right - rect.left, rect.bottom - rect.top),
- NULL, window_dc_));
- if (!frame.get()) {
+ HDC window_dc = GetWindowDC(window_);
+ if (!window_dc) {
+ LOG(LS_WARNING) << "Failed to get window DC: " << GetLastError();
callback_->OnCaptureCompleted(NULL);
return;
}

- HDC mem_dc = CreateCompatibleDC(window_dc_);
- SelectObject(mem_dc, frame->bitmap());
+ scoped_ptr<DesktopFrameWin> frame(DesktopFrameWin::Create(
+ DesktopSize(rect.right - rect.left, rect.bottom - rect.top),
+ NULL, window_dc));
+ if (!frame.get()) {
+ ReleaseDC(window_, window_dc);
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ HDC mem_dc = CreateCompatibleDC(window_dc);
+ HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap());

BOOL result = FALSE;

// When desktop composition (Aero) is enabled each window is rendered to a
@@ -203,21 +209,33 @@
// window is occluded. PrintWindow() is slower but allows rendering the window
// contents to an off-screen device context when Aero is not available.
// PrintWindow() is not supported by some applications.
-
+
// If Aero is enabled, we prefer BitBlt() because it's faster and avoids
// window flickering. Otherwise, we prefer PrintWindow() because BitBlt() may
// render occluding windows on top of the desired window.
+ //
+ // When composition is enabled the DC returned by GetWindowDC() doesn't always
+ // have the window frame rendered correctly. Windows renders it only once and
+ // then caches the result between captures. We work around this by calling
+ // PrintWindow() whenever the window size changes - it somehow affects what we
+ // get from BitBlt() on the subsequent captures.

- if (!IsAeroEnabled())
+ if (!IsAeroEnabled() ||
+ (!previous_size_.is_empty() && !previous_size_.equals(frame->size()))) {
result = PrintWindow(window_, mem_dc, 0);
+ }

// Aero is enabled or PrintWindow() failed, use BitBlt.
if (!result) { result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(), - window_dc_, 0, 0, SRCCOPY); + window_dc, 0, 0, SRCCOPY); } + SelectObject(mem_dc, previous_object); DeleteDC(mem_dc); + ReleaseDC(window_, window_dc); + + previous_size_ = frame->size(); if (!result) { LOG(LS_ERROR) << "Both PrintWindow() and BitBlt() failed."; @@ -230,7 +248,7 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) { } // namespace // static -WindowCapturer* WindowCapturer::Create() { +WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) { return new WindowCapturerWin(); } diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_x11.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_x11.cc index adafa9450d40..5a14356fd16f 100755 --- a/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_x11.cc +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_x11.cc @@ -18,11 +18,14 @@ #include #include +#include "webrtc/modules/desktop_capture/desktop_capture_options.h" #include "webrtc/modules/desktop_capture/desktop_frame.h" +#include "webrtc/modules/desktop_capture/x11/shared_x_display.h" #include "webrtc/modules/desktop_capture/x11/x_error_trap.h" #include "webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h" #include "webrtc/system_wrappers/interface/logging.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/system_wrappers/interface/scoped_refptr.h" namespace webrtc { @@ -83,7 +86,7 @@ class XWindowProperty { class WindowCapturerLinux : public WindowCapturer { public: - WindowCapturerLinux(); + WindowCapturerLinux(const DesktopCaptureOptions& options); virtual ~WindowCapturerLinux(); // WindowCapturer interface. @@ -95,6 +98,8 @@ class WindowCapturerLinux : public WindowCapturer { virtual void Capture(const DesktopRegion& region) OVERRIDE; private: + Display* display() { return x_display_->display(); } + // Iterates through |window| hierarchy to find first visible window, i.e. one // that has WM_STATE property set to NormalState. // See http://tronche.com/gui/x/icccm/sec-4.html#s-4.1.3.1 . @@ -108,7 +113,7 @@ class WindowCapturerLinux : public WindowCapturer { Callback* callback_; - Display* display_; + scoped_refptr x_display_; Atom wm_state_atom_; Atom window_type_atom_; @@ -121,26 +126,20 @@ class WindowCapturerLinux : public WindowCapturer { DISALLOW_COPY_AND_ASSIGN(WindowCapturerLinux); }; -WindowCapturerLinux::WindowCapturerLinux() +WindowCapturerLinux::WindowCapturerLinux(const DesktopCaptureOptions& options) : callback_(NULL), - display_(NULL), + x_display_(options.x_display()), has_composite_extension_(false), selected_window_(0) { - display_ = XOpenDisplay(NULL); - if (!display_) { - LOG(LS_ERROR) << "Failed to open display."; - return; - } - // Create Atoms so we don't need to do it every time they are used. 
- wm_state_atom_ = XInternAtom(display_, "WM_STATE", True); - window_type_atom_ = XInternAtom(display_, "_NET_WM_WINDOW_TYPE", True); + wm_state_atom_ = XInternAtom(display(), "WM_STATE", True); + window_type_atom_ = XInternAtom(display(), "_NET_WM_WINDOW_TYPE", True); normal_window_type_atom_ = XInternAtom( - display_, "_NET_WM_WINDOW_TYPE_NORMAL", True); + display(), "_NET_WM_WINDOW_TYPE_NORMAL", True); int event_base, error_base, major_version, minor_version; - if (XCompositeQueryExtension(display_, &event_base, &error_base) && - XCompositeQueryVersion(display_, &major_version, &minor_version) && + if (XCompositeQueryExtension(display(), &event_base, &error_base) && + XCompositeQueryVersion(display(), &major_version, &minor_version) && // XCompositeNameWindowPixmap() requires version 0.2 (major_version > 0 || minor_version >= 2)) { has_composite_extension_ = true; @@ -149,26 +148,20 @@ WindowCapturerLinux::WindowCapturerLinux() } } -WindowCapturerLinux::~WindowCapturerLinux() { - if (display_) - XCloseDisplay(display_); -} +WindowCapturerLinux::~WindowCapturerLinux() {} bool WindowCapturerLinux::GetWindowList(WindowList* windows) { - if (!display_) - return false; - WindowList result; - XErrorTrap error_trap(display_); + XErrorTrap error_trap(display()); - int num_screens = XScreenCount(display_); + int num_screens = XScreenCount(display()); for (int screen = 0; screen < num_screens; ++screen) { - ::Window root_window = XRootWindow(display_, screen); + ::Window root_window = XRootWindow(display(), screen); ::Window parent; ::Window *children; unsigned int num_children; - int status = XQueryTree(display_, root_window, &root_window, &parent, + int status = XQueryTree(display(), root_window, &root_window, &parent, &children, &num_children); if (status == 0) { LOG(LS_ERROR) << "Failed to query for child windows for screen " @@ -198,7 +191,7 @@ bool WindowCapturerLinux::GetWindowList(WindowList* windows) { } bool WindowCapturerLinux::SelectWindow(WindowId id) { - if (!x_server_pixel_buffer_.Init(display_, id)) + if (!x_server_pixel_buffer_.Init(display(), id)) return false; selected_window_ = id; @@ -210,7 +203,7 @@ bool WindowCapturerLinux::SelectWindow(WindowId id) { // Redirect drawing to an offscreen buffer (ie, turn on compositing). X11 // remembers who has requested this and will turn it off for us when we exit. - XCompositeRedirectWindow(display_, id, CompositeRedirectAutomatic); + XCompositeRedirectWindow(display(), id, CompositeRedirectAutomatic); return true; } @@ -244,7 +237,7 @@ void WindowCapturerLinux::Capture(const DesktopRegion& region) { ::Window WindowCapturerLinux::GetApplicationWindow(::Window window) { // Get WM_STATE property of the window. - XWindowProperty window_state(display_, window, wm_state_atom_); + XWindowProperty window_state(display(), window, wm_state_atom_); // WM_STATE is considered to be set to WithdrawnState when it missing. int32_t state = window_state.is_valid() ? 
@@ -262,7 +255,7 @@ void WindowCapturerLinux::Capture(const DesktopRegion& region) { ::Window root, parent; ::Window *children; unsigned int num_children; - if (!XQueryTree(display_, window, &root, &parent, &children, + if (!XQueryTree(display(), window, &root, &parent, &children, &num_children)) { LOG(LS_ERROR) << "Failed to query for child windows although window" << "does not have a valid WM_STATE."; @@ -289,7 +282,7 @@ bool WindowCapturerLinux::IsDesktopElement(::Window window) { // says this hint *should* be present on all windows, and we use the existence // of _NET_WM_WINDOW_TYPE_NORMAL in the property to indicate a window is not // a desktop element (that is, only "normal" windows should be shareable). - XWindowProperty window_type(display_, window, window_type_atom_); + XWindowProperty window_type(display(), window, window_type_atom_); if (window_type.is_valid() && window_type.size() > 0) { uint32_t* end = window_type.data() + window_type.size(); bool is_normal = (end != std::find( @@ -299,7 +292,7 @@ bool WindowCapturerLinux::IsDesktopElement(::Window window) { // Fall back on using the hint. XClassHint class_hint; - Status status = XGetClassHint(display_, window, &class_hint); + Status status = XGetClassHint(display(), window, &class_hint); bool result = false; if (status == 0) { // No hints, assume this is a normal application window. @@ -321,11 +314,11 @@ bool WindowCapturerLinux::GetWindowTitle(::Window window, std::string* title) { XTextProperty window_name; window_name.value = NULL; if (window) { - status = XGetWMName(display_, window, &window_name); + status = XGetWMName(display(), window, &window_name); if (status && window_name.value && window_name.nitems) { int cnt; char **list = NULL; - status = Xutf8TextPropertyToTextList(display_, &window_name, &list, + status = Xutf8TextPropertyToTextList(display(), &window_name, &list, &cnt); if (status >= Success && cnt && *list) { if (cnt > 1) { @@ -347,8 +340,10 @@ bool WindowCapturerLinux::GetWindowTitle(::Window window, std::string* title) { } // namespace // static -WindowCapturer* WindowCapturer::Create() { - return new WindowCapturerLinux(); +WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) { + if (!options.x_display()) + return NULL; + return new WindowCapturerLinux(options); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_display.cc b/media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_display.cc new file mode 100644 index 000000000000..05b7f572e0b2 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_display.cc @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "webrtc/modules/desktop_capture/x11/shared_x_display.h"
+
+#include
+
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+SharedXDisplay::SharedXDisplay(Display* display)
+ : display_(display) {
+ assert(display_);
+}
+
+SharedXDisplay::~SharedXDisplay() {
+ assert(event_handlers_.empty());
+ XCloseDisplay(display_);
+}
+
+// static
+scoped_refptr<SharedXDisplay> SharedXDisplay::Create(
+ const std::string& display_name) {
+ Display* display =
+ XOpenDisplay(display_name.empty() ? NULL : display_name.c_str());
+ if (!display) {
+ LOG(LS_ERROR) << "Unable to open display";
+ return NULL;
+ }
+ return new SharedXDisplay(display);
+}
+
+// static
+scoped_refptr<SharedXDisplay> SharedXDisplay::CreateDefault() {
+ return Create(std::string());
+}
+
+void SharedXDisplay::AddEventHandler(int type, XEventHandler* handler) {
+ event_handlers_[type].push_back(handler);
+}
+
+void SharedXDisplay::RemoveEventHandler(int type, XEventHandler* handler) {
+ EventHandlersMap::iterator handlers = event_handlers_.find(type);
+ if (handlers == event_handlers_.end())
+ return;
+
+ std::vector<XEventHandler*>::iterator new_end =
+ std::remove(handlers->second.begin(), handlers->second.end(), handler);
+ handlers->second.erase(new_end, handlers->second.end());
+
+ // Check if no handlers left for this event.
+ if (handlers->second.empty())
+ event_handlers_.erase(handlers);
+}
+
+void SharedXDisplay::ProcessPendingXEvents() {
+ // Hold reference to |this| to prevent it from being destroyed while
+ // processing events.
+ scoped_refptr<SharedXDisplay> self(this);
+
+ // Find the number of events that are outstanding "now." We don't just loop
+ // on XPending because we want to guarantee this terminates.
+ int events_to_process = XPending(display());
+ XEvent e;
+
+ for (int i = 0; i < events_to_process; i++) {
+ XNextEvent(display(), &e);
+ EventHandlersMap::iterator handlers = event_handlers_.find(e.type);
+ if (handlers == event_handlers_.end())
+ continue;
+ for (std::vector<XEventHandler*>::iterator it = handlers->second.begin();
+ it != handlers->second.end(); ++it) {
+ if ((*it)->HandleXEvent(e))
+ break;
+ }
+ }
+}
+
+} // namespace webrtc
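The implementation above covers the full lifetime of the shared connection. A hedged usage sketch of how a client owns and pumps it (PumpOnce and |handler| are illustrative; the class and method names match the header that follows):

// Illustrative only: open the default display, register a handler, and
// dispatch pending events once.
void PumpOnce(SharedXDisplay::XEventHandler* handler) {
  scoped_refptr<SharedXDisplay> display = SharedXDisplay::CreateDefault();
  if (!display.get())
    return;
  display->AddEventHandler(ConfigureNotify, handler);
  display->ProcessPendingXEvents();  // Dispatches to registered handlers.
  display->RemoveEventHandler(ConfigureNotify, handler);
  // The X connection closes when the last scoped_refptr drops its reference.
}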
diff --git a/media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_display.h b/media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_display.h
new file mode 100644
index 000000000000..81b5ef6606a2
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_display.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_X11_SHARED_X_DISPLAY_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_X11_SHARED_X_DISPLAY_H_
+
+#include
+#include
+
+#include
+#include
+
+#include
+
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
+
+namespace webrtc {
+
+// A ref-counted object to store XDisplay connection.
+class SharedXDisplay {
+ public:
+ class XEventHandler {
+ public:
+ virtual ~XEventHandler() {}
+
+ // Processes XEvent. Returns true if the event has been handled.
+ virtual bool HandleXEvent(const XEvent& event) = 0;
+ };
+
+ // Takes ownership of |display|.
+ explicit SharedXDisplay(Display* display);
+
+ // Creates a new X11 Display for the |display_name|. NULL is returned if X11
+ // connection failed. Equivalent to CreateDefault() when |display_name| is
+ // empty.
+ static scoped_refptr<SharedXDisplay> Create(const std::string& display_name);
+
+ // Creates X11 Display connection for the default display (e.g. specified in
+ // DISPLAY). NULL is returned if X11 connection failed.
+ static scoped_refptr<SharedXDisplay> CreateDefault();
+
+ void AddRef() { ++ref_count_; }
+ void Release() {
+ if (--ref_count_ == 0)
+ delete this;
+ }
+
+ Display* display() { return display_; }
+
+ // Adds a new event |handler| for XEvent's of |type|.
+ void AddEventHandler(int type, XEventHandler* handler);
+
+ // Removes event |handler| added using |AddEventHandler|. Doesn't do anything
+ // if |handler| is not registered.
+ void RemoveEventHandler(int type, XEventHandler* handler);
+
+ // Processes pending XEvents, calling corresponding event handlers.
+ void ProcessPendingXEvents();
+
+ private:
+ typedef std::map<int, std::vector<XEventHandler*> > EventHandlersMap;
+
+ ~SharedXDisplay();
+
+ Atomic32 ref_count_;
+ Display* display_;
+
+ EventHandlersMap event_handlers_;
+
+ DISALLOW_COPY_AND_ASSIGN(SharedXDisplay);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_X11_SHARED_X_DISPLAY_H_

diff --git a/media/webrtc/trunk/webrtc/modules/interface/module.h b/media/webrtc/trunk/webrtc/modules/interface/module.h
index d9932add52c3..c177fd13e275 100644
--- a/media/webrtc/trunk/webrtc/modules/interface/module.h
+++ b/media/webrtc/trunk/webrtc/modules/interface/module.h
@@ -13,7 +13,7 @@

#include

-#include "typedefs.h"
+#include "webrtc/typedefs.h"

namespace webrtc {

diff --git a/media/webrtc/trunk/webrtc/modules/interface/module_common_types.h b/media/webrtc/trunk/webrtc/modules/interface/module_common_types.h
index b663d9ae6c30..737242e0a3a5 100644
--- a/media/webrtc/trunk/webrtc/modules/interface/module_common_types.h
+++ b/media/webrtc/trunk/webrtc/modules/interface/module_common_types.h
@@ -12,7 +12,7 @@
#define MODULE_COMMON_TYPES_H

#include
-#include // memcpy
+#include // memcpy

#include

@@ -21,73 +21,80 @@
#include "webrtc/typedefs.h"

#ifdef _WIN32
- #pragma warning(disable:4351) // remove warning "new behavior: elements of array
- // 'array' will be default initialized"
+// Remove warning "new behavior: elements of array will be default initialized".
+#pragma warning(disable : 4351) #endif namespace webrtc { -struct RTPHeaderExtension -{ - int32_t transmissionTimeOffset; - uint32_t absoluteSendTime; +struct RTPHeaderExtension { + bool hasTransmissionTimeOffset; + int32_t transmissionTimeOffset; + bool hasAbsoluteSendTime; + uint32_t absoluteSendTime; }; -struct RTPHeader -{ - bool markerBit; - uint8_t payloadType; - uint16_t sequenceNumber; - uint32_t timestamp; - uint32_t ssrc; - uint8_t numCSRCs; - uint32_t arrOfCSRCs[kRtpCsrcSize]; - uint8_t paddingLength; - uint16_t headerLength; - int payload_type_frequency; - RTPHeaderExtension extension; +struct RTPHeader { + bool markerBit; + uint8_t payloadType; + uint16_t sequenceNumber; + uint32_t timestamp; + uint32_t ssrc; + uint8_t numCSRCs; + uint32_t arrOfCSRCs[kRtpCsrcSize]; + uint8_t paddingLength; + uint16_t headerLength; + int payload_type_frequency; + RTPHeaderExtension extension; }; -struct RTPAudioHeader -{ - uint8_t numEnergy; // number of valid entries in arrOfEnergy - uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel - bool isCNG; // is this CNG - uint8_t channel; // number of channels 2 = stereo +struct RTPAudioHeader { + uint8_t numEnergy; // number of valid entries in arrOfEnergy + uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel + bool isCNG; // is this CNG + uint8_t channel; // number of channels 2 = stereo }; -enum {kNoPictureId = -1}; -enum {kNoTl0PicIdx = -1}; -enum {kNoTemporalIdx = -1}; -enum {kNoKeyIdx = -1}; -enum {kNoSimulcastIdx = 0}; +enum { + kNoPictureId = -1 +}; +enum { + kNoTl0PicIdx = -1 +}; +enum { + kNoTemporalIdx = -1 +}; +enum { + kNoKeyIdx = -1 +}; +enum { + kNoSimulcastIdx = 0 +}; -struct RTPVideoHeaderVP8 -{ - void InitRTPVideoHeaderVP8() - { - nonReference = false; - pictureId = kNoPictureId; - tl0PicIdx = kNoTl0PicIdx; - temporalIdx = kNoTemporalIdx; - layerSync = false; - keyIdx = kNoKeyIdx; - partitionId = 0; - beginningOfPartition = false; - } +struct RTPVideoHeaderVP8 { + void InitRTPVideoHeaderVP8() { + nonReference = false; + pictureId = kNoPictureId; + tl0PicIdx = kNoTl0PicIdx; + temporalIdx = kNoTemporalIdx; + layerSync = false; + keyIdx = kNoKeyIdx; + partitionId = 0; + beginningOfPartition = false; + } - bool nonReference; // Frame is discardable. - int16_t pictureId; // Picture ID index, 15 bits; - // kNoPictureId if PictureID does not exist. - int16_t tl0PicIdx; // TL0PIC_IDX, 8 bits; - // kNoTl0PicIdx means no value provided. - int8_t temporalIdx; // Temporal layer index, or kNoTemporalIdx. - bool layerSync; // This frame is a layer sync frame. - // Disabled if temporalIdx == kNoTemporalIdx. - int keyIdx; // 5 bits; kNoKeyIdx means not used. - int partitionId; // VP8 partition ID - bool beginningOfPartition; // True if this packet is the first - // in a VP8 partition. Otherwise false + bool nonReference; // Frame is discardable. + int16_t pictureId; // Picture ID index, 15 bits; + // kNoPictureId if PictureID does not exist. + int16_t tl0PicIdx; // TL0PIC_IDX, 8 bits; + // kNoTl0PicIdx means no value provided. + int8_t temporalIdx; // Temporal layer index, or kNoTemporalIdx. + bool layerSync; // This frame is a layer sync frame. + // Disabled if temporalIdx == kNoTemporalIdx. + int keyIdx; // 5 bits; kNoKeyIdx means not used. + int partitionId; // VP8 partition ID + bool beginningOfPartition; // True if this packet is the first + // in a VP8 partition. 
Otherwise false }; struct RTPVideoHeaderH264 { @@ -95,217 +102,193 @@ struct RTPVideoHeaderH264 { bool single_nalu; }; -union RTPVideoTypeHeader -{ - RTPVideoHeaderVP8 VP8; - RTPVideoHeaderH264 H264; +union RTPVideoTypeHeader { + RTPVideoHeaderVP8 VP8; + RTPVideoHeaderH264 H264; }; -enum RtpVideoCodecTypes -{ - kRtpVideoNone, - kRtpVideoGeneric, - kRtpVideoVp8, - kRtpVideoH264, +enum RtpVideoCodecTypes { + kRtpVideoNone, + kRtpVideoGeneric, + kRtpVideoVp8, + kRtpVideoH264 +}; +struct RTPVideoHeader { + uint16_t width; // size + uint16_t height; + + bool isFirstPacket; // first packet in frame + uint8_t simulcastIdx; // Index if the simulcast encoder creating + // this frame, 0 if not using simulcast. + RtpVideoCodecTypes codec; + RTPVideoTypeHeader codecHeader; +}; +union RTPTypeHeader { + RTPAudioHeader Audio; + RTPVideoHeader Video; }; -struct RTPVideoHeader -{ - uint16_t width; // size - uint16_t height; - - bool isFirstPacket; // first packet in frame - uint8_t simulcastIdx; // Index if the simulcast encoder creating - // this frame, 0 if not using simulcast. - RtpVideoCodecTypes codec; - RTPVideoTypeHeader codecHeader; -}; -union RTPTypeHeader -{ - RTPAudioHeader Audio; - RTPVideoHeader Video; +struct WebRtcRTPHeader { + RTPHeader header; + FrameType frameType; + RTPTypeHeader type; }; -struct WebRtcRTPHeader -{ - RTPHeader header; - FrameType frameType; - RTPTypeHeader type; -}; - -class RTPFragmentationHeader -{ -public: - RTPFragmentationHeader() : - fragmentationVectorSize(0), +class RTPFragmentationHeader { + public: + RTPFragmentationHeader() + : fragmentationVectorSize(0), fragmentationOffset(NULL), fragmentationLength(NULL), fragmentationTimeDiff(NULL), - fragmentationPlType(NULL) - {}; + fragmentationPlType(NULL) {}; - ~RTPFragmentationHeader() - { - delete [] fragmentationOffset; - delete [] fragmentationLength; - delete [] fragmentationTimeDiff; - delete [] fragmentationPlType; + ~RTPFragmentationHeader() { + delete[] fragmentationOffset; + delete[] fragmentationLength; + delete[] fragmentationTimeDiff; + delete[] fragmentationPlType; + } + + void CopyFrom(const RTPFragmentationHeader& src) { + if (this == &src) { + return; } - void CopyFrom(const RTPFragmentationHeader& src) - { - if(this == &src) - { - return; + if (src.fragmentationVectorSize != fragmentationVectorSize) { + // new size of vectors + + // delete old + delete[] fragmentationOffset; + fragmentationOffset = NULL; + delete[] fragmentationLength; + fragmentationLength = NULL; + delete[] fragmentationTimeDiff; + fragmentationTimeDiff = NULL; + delete[] fragmentationPlType; + fragmentationPlType = NULL; + + if (src.fragmentationVectorSize > 0) { + // allocate new + if (src.fragmentationOffset) { + fragmentationOffset = new uint32_t[src.fragmentationVectorSize]; } - - if(src.fragmentationVectorSize != fragmentationVectorSize) - { - // new size of vectors - - // delete old - delete [] fragmentationOffset; - fragmentationOffset = NULL; - delete [] fragmentationLength; - fragmentationLength = NULL; - delete [] fragmentationTimeDiff; - fragmentationTimeDiff = NULL; - delete [] fragmentationPlType; - fragmentationPlType = NULL; - - if(src.fragmentationVectorSize > 0) - { - // allocate new - if(src.fragmentationOffset) - { - fragmentationOffset = new uint32_t[src.fragmentationVectorSize]; - } - if(src.fragmentationLength) - { - fragmentationLength = new uint32_t[src.fragmentationVectorSize]; - } - if(src.fragmentationTimeDiff) - { - fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize]; - } - 
if(src.fragmentationPlType) - { - fragmentationPlType = new uint8_t[src.fragmentationVectorSize]; - } - } - // set new size - fragmentationVectorSize = src.fragmentationVectorSize; + if (src.fragmentationLength) { + fragmentationLength = new uint32_t[src.fragmentationVectorSize]; } - - if(src.fragmentationVectorSize > 0) - { - // copy values - if(src.fragmentationOffset) - { - memcpy(fragmentationOffset, src.fragmentationOffset, - src.fragmentationVectorSize * sizeof(uint32_t)); - } - if(src.fragmentationLength) - { - memcpy(fragmentationLength, src.fragmentationLength, - src.fragmentationVectorSize * sizeof(uint32_t)); - } - if(src.fragmentationTimeDiff) - { - memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff, - src.fragmentationVectorSize * sizeof(uint16_t)); - } - if(src.fragmentationPlType) - { - memcpy(fragmentationPlType, src.fragmentationPlType, - src.fragmentationVectorSize * sizeof(uint8_t)); - } + if (src.fragmentationTimeDiff) { + fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize]; } + if (src.fragmentationPlType) { + fragmentationPlType = new uint8_t[src.fragmentationVectorSize]; + } + } + // set new size + fragmentationVectorSize = src.fragmentationVectorSize; } - void VerifyAndAllocateFragmentationHeader(const uint16_t size) - { - if(fragmentationVectorSize < size) - { - uint16_t oldVectorSize = fragmentationVectorSize; - { - // offset - uint32_t* oldOffsets = fragmentationOffset; - fragmentationOffset = new uint32_t[size]; - memset(fragmentationOffset+oldVectorSize, 0, - sizeof(uint32_t)*(size-oldVectorSize)); - // copy old values - memcpy(fragmentationOffset,oldOffsets, sizeof(uint32_t) * oldVectorSize); - delete[] oldOffsets; - } - // length - { - uint32_t* oldLengths = fragmentationLength; - fragmentationLength = new uint32_t[size]; - memset(fragmentationLength+oldVectorSize, 0, - sizeof(uint32_t) * (size- oldVectorSize)); - memcpy(fragmentationLength, oldLengths, - sizeof(uint32_t) * oldVectorSize); - delete[] oldLengths; - } - // time diff - { - uint16_t* oldTimeDiffs = fragmentationTimeDiff; - fragmentationTimeDiff = new uint16_t[size]; - memset(fragmentationTimeDiff+oldVectorSize, 0, - sizeof(uint16_t) * (size- oldVectorSize)); - memcpy(fragmentationTimeDiff, oldTimeDiffs, - sizeof(uint16_t) * oldVectorSize); - delete[] oldTimeDiffs; - } - // payload type - { - uint8_t* oldTimePlTypes = fragmentationPlType; - fragmentationPlType = new uint8_t[size]; - memset(fragmentationPlType+oldVectorSize, 0, - sizeof(uint8_t) * (size- oldVectorSize)); - memcpy(fragmentationPlType, oldTimePlTypes, - sizeof(uint8_t) * oldVectorSize); - delete[] oldTimePlTypes; - } - fragmentationVectorSize = size; - } + if (src.fragmentationVectorSize > 0) { + // copy values + if (src.fragmentationOffset) { + memcpy(fragmentationOffset, src.fragmentationOffset, + src.fragmentationVectorSize * sizeof(uint32_t)); + } + if (src.fragmentationLength) { + memcpy(fragmentationLength, src.fragmentationLength, + src.fragmentationVectorSize * sizeof(uint32_t)); + } + if (src.fragmentationTimeDiff) { + memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff, + src.fragmentationVectorSize * sizeof(uint16_t)); + } + if (src.fragmentationPlType) { + memcpy(fragmentationPlType, src.fragmentationPlType, + src.fragmentationVectorSize * sizeof(uint8_t)); + } } + } - uint16_t fragmentationVectorSize; // Number of fragmentations - uint32_t* fragmentationOffset; // Offset of pointer to data for each fragm. 
- uint32_t* fragmentationLength; // Data size for each fragmentation - uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for - // each fragmentation - uint8_t* fragmentationPlType; // Payload type of each fragmentation + void VerifyAndAllocateFragmentationHeader(const uint16_t size) { + if (fragmentationVectorSize < size) { + uint16_t oldVectorSize = fragmentationVectorSize; + { + // offset + uint32_t* oldOffsets = fragmentationOffset; + fragmentationOffset = new uint32_t[size]; + memset(fragmentationOffset + oldVectorSize, 0, + sizeof(uint32_t) * (size - oldVectorSize)); + // copy old values + memcpy(fragmentationOffset, oldOffsets, + sizeof(uint32_t) * oldVectorSize); + delete[] oldOffsets; + } + // length + { + uint32_t* oldLengths = fragmentationLength; + fragmentationLength = new uint32_t[size]; + memset(fragmentationLength + oldVectorSize, 0, + sizeof(uint32_t) * (size - oldVectorSize)); + memcpy(fragmentationLength, oldLengths, + sizeof(uint32_t) * oldVectorSize); + delete[] oldLengths; + } + // time diff + { + uint16_t* oldTimeDiffs = fragmentationTimeDiff; + fragmentationTimeDiff = new uint16_t[size]; + memset(fragmentationTimeDiff + oldVectorSize, 0, + sizeof(uint16_t) * (size - oldVectorSize)); + memcpy(fragmentationTimeDiff, oldTimeDiffs, + sizeof(uint16_t) * oldVectorSize); + delete[] oldTimeDiffs; + } + // payload type + { + uint8_t* oldTimePlTypes = fragmentationPlType; + fragmentationPlType = new uint8_t[size]; + memset(fragmentationPlType + oldVectorSize, 0, + sizeof(uint8_t) * (size - oldVectorSize)); + memcpy(fragmentationPlType, oldTimePlTypes, + sizeof(uint8_t) * oldVectorSize); + delete[] oldTimePlTypes; + } + fragmentationVectorSize = size; + } + } -private: - DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader); + uint16_t fragmentationVectorSize; // Number of fragmentations + uint32_t* fragmentationOffset; // Offset of pointer to data for each fragm. + uint32_t* fragmentationLength; // Data size for each fragmentation + uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for + // each fragmentation + uint8_t* fragmentationPlType; // Payload type of each fragmentation + + private: + DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader); }; -struct RTCPVoIPMetric -{ - // RFC 3611 4.7 - uint8_t lossRate; - uint8_t discardRate; - uint8_t burstDensity; - uint8_t gapDensity; - uint16_t burstDuration; - uint16_t gapDuration; - uint16_t roundTripDelay; - uint16_t endSystemDelay; - uint8_t signalLevel; - uint8_t noiseLevel; - uint8_t RERL; - uint8_t Gmin; - uint8_t Rfactor; - uint8_t extRfactor; - uint8_t MOSLQ; - uint8_t MOSCQ; - uint8_t RXconfig; - uint16_t JBnominal; - uint16_t JBmax; - uint16_t JBabsMax; +struct RTCPVoIPMetric { + // RFC 3611 4.7 + uint8_t lossRate; + uint8_t discardRate; + uint8_t burstDensity; + uint8_t gapDensity; + uint16_t burstDuration; + uint16_t gapDuration; + uint16_t roundTripDelay; + uint16_t endSystemDelay; + uint8_t signalLevel; + uint8_t noiseLevel; + uint8_t RERL; + uint8_t Gmin; + uint8_t Rfactor; + uint8_t extRfactor; + uint8_t MOSLQ; + uint8_t MOSCQ; + uint8_t RXconfig; + uint16_t JBnominal; + uint16_t JBmax; + uint16_t JBabsMax; }; // Types for the FEC packet masks. The type |kFecMaskRandom| is based on a @@ -336,11 +319,10 @@ class CallStatsObserver { }; // class describing a complete, or parts of an encoded frame. 
-class EncodedVideoData -{ -public: - EncodedVideoData() : - payloadType(0), +class EncodedVideoData { + public: + EncodedVideoData() + : payloadType(0), timeStamp(0), renderTimeMs(0), encodedWidth(0), @@ -352,91 +334,79 @@ public: bufferSize(0), fragmentationHeader(), frameType(kVideoFrameDelta), - codec(kVideoCodecUnknown) - {}; + codec(kVideoCodecUnknown) {}; - EncodedVideoData(const EncodedVideoData& data) - { - payloadType = data.payloadType; - timeStamp = data.timeStamp; - renderTimeMs = data.renderTimeMs; - encodedWidth = data.encodedWidth; - encodedHeight = data.encodedHeight; - completeFrame = data.completeFrame; - missingFrame = data.missingFrame; - payloadSize = data.payloadSize; - fragmentationHeader.CopyFrom(data.fragmentationHeader); - frameType = data.frameType; - codec = data.codec; - if (data.payloadSize > 0) - { - payloadData = new uint8_t[data.payloadSize]; - memcpy(payloadData, data.payloadData, data.payloadSize); - } - else - { - payloadData = NULL; - } + EncodedVideoData(const EncodedVideoData& data) { + payloadType = data.payloadType; + timeStamp = data.timeStamp; + renderTimeMs = data.renderTimeMs; + encodedWidth = data.encodedWidth; + encodedHeight = data.encodedHeight; + completeFrame = data.completeFrame; + missingFrame = data.missingFrame; + payloadSize = data.payloadSize; + fragmentationHeader.CopyFrom(data.fragmentationHeader); + frameType = data.frameType; + codec = data.codec; + if (data.payloadSize > 0) { + payloadData = new uint8_t[data.payloadSize]; + memcpy(payloadData, data.payloadData, data.payloadSize); + } else { + payloadData = NULL; } + } + ~EncodedVideoData() { + delete[] payloadData; + }; - ~EncodedVideoData() - { - delete [] payloadData; - }; - - EncodedVideoData& operator=(const EncodedVideoData& data) - { - if (this == &data) - { - return *this; - } - payloadType = data.payloadType; - timeStamp = data.timeStamp; - renderTimeMs = data.renderTimeMs; - encodedWidth = data.encodedWidth; - encodedHeight = data.encodedHeight; - completeFrame = data.completeFrame; - missingFrame = data.missingFrame; - payloadSize = data.payloadSize; - fragmentationHeader.CopyFrom(data.fragmentationHeader); - frameType = data.frameType; - codec = data.codec; - if (data.payloadSize > 0) - { - delete [] payloadData; - payloadData = new uint8_t[data.payloadSize]; - memcpy(payloadData, data.payloadData, data.payloadSize); - bufferSize = data.payloadSize; - } - return *this; - }; - void VerifyAndAllocate( const uint32_t size) - { - if (bufferSize < size) - { - uint8_t* oldPayload = payloadData; - payloadData = new uint8_t[size]; - memcpy(payloadData, oldPayload, sizeof(uint8_t) * payloadSize); - - bufferSize = size; - delete[] oldPayload; - } + EncodedVideoData& operator=(const EncodedVideoData& data) { + if (this == &data) { + return *this; } + payloadType = data.payloadType; + timeStamp = data.timeStamp; + renderTimeMs = data.renderTimeMs; + encodedWidth = data.encodedWidth; + encodedHeight = data.encodedHeight; + completeFrame = data.completeFrame; + missingFrame = data.missingFrame; + payloadSize = data.payloadSize; + fragmentationHeader.CopyFrom(data.fragmentationHeader); + frameType = data.frameType; + codec = data.codec; + if (data.payloadSize > 0) { + delete[] payloadData; + payloadData = new uint8_t[data.payloadSize]; + memcpy(payloadData, data.payloadData, data.payloadSize); + bufferSize = data.payloadSize; + } + return *this; + }; + void VerifyAndAllocate(const uint32_t size) { + if (bufferSize < size) { + uint8_t* oldPayload = payloadData; + payloadData = new 
uint8_t[size]; + memcpy(payloadData, oldPayload, sizeof(uint8_t) * payloadSize); - uint8_t payloadType; - uint32_t timeStamp; - int64_t renderTimeMs; - uint32_t encodedWidth; - uint32_t encodedHeight; - bool completeFrame; - bool missingFrame; - uint8_t* payloadData; - uint32_t payloadSize; - uint32_t bufferSize; - RTPFragmentationHeader fragmentationHeader; - FrameType frameType; - VideoCodecType codec; + bufferSize = size; + delete[] oldPayload; + } + } + + uint8_t payloadType; + uint32_t timeStamp; + int64_t renderTimeMs; + uint32_t encodedWidth; + uint32_t encodedHeight; + bool completeFrame; + bool missingFrame; + uint8_t* payloadData; + uint32_t payloadSize; + uint32_t bufferSize; + RTPFragmentationHeader fragmentationHeader; + FrameType frameType; + VideoCodecType codec; }; struct VideoContentMetrics { @@ -444,8 +414,7 @@ struct VideoContentMetrics { : motion_magnitude(0.0f), spatial_pred_err(0.0f), spatial_pred_err_h(0.0f), - spatial_pred_err_v(0.0f) { - } + spatial_pred_err_v(0.0f) {} void Reset() { motion_magnitude = 0.0f; @@ -468,263 +437,229 @@ struct VideoContentMetrics { * * *************************************************/ -class VideoFrame -{ -public: - VideoFrame(); - ~VideoFrame(); - /** - * Verifies that current allocated buffer size is larger than or equal to the input size. - * If the current buffer size is smaller, a new allocation is made and the old buffer data - * is copied to the new buffer. - * Buffer size is updated to minimumSize. - */ - int32_t VerifyAndAllocate(const uint32_t minimumSize); - /** - * Update length of data buffer in frame. Function verifies that new length is less or - * equal to allocated size. - */ - int32_t SetLength(const uint32_t newLength); - /* - * Swap buffer and size data - */ - int32_t Swap(uint8_t*& newMemory, - uint32_t& newLength, - uint32_t& newSize); - /* - * Swap buffer and size data - */ - int32_t SwapFrame(VideoFrame& videoFrame); - /** - * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length - * is allocated. - */ - int32_t CopyFrame(const VideoFrame& videoFrame); - /** - * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length - * is allocated. - */ - int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer); - /** - * Delete VideoFrame and resets members to zero - */ - void Free(); - /** - * Set frame timestamp (90kHz) - */ - void SetTimeStamp(const uint32_t timeStamp) {_timeStamp = timeStamp;} - /** - * Get pointer to frame buffer - */ - uint8_t* Buffer() const {return _buffer;} +class VideoFrame { + public: + VideoFrame(); + ~VideoFrame(); + /** + * Verifies that current allocated buffer size is larger than or equal to the + * input size. + * If the current buffer size is smaller, a new allocation is made and the old + * buffer data + * is copied to the new buffer. + * Buffer size is updated to minimumSize. + */ + int32_t VerifyAndAllocate(const uint32_t minimumSize); + /** + * Update length of data buffer in frame. Function verifies that new length + * is less or + * equal to allocated size. + */ + int32_t SetLength(const uint32_t newLength); + /* + * Swap buffer and size data + */ + int32_t Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize); + /* + * Swap buffer and size data + */ + int32_t SwapFrame(VideoFrame& videoFrame); + /** + * Copy buffer: If newLength is bigger than allocated size, a new buffer of + * size length + * is allocated. 
+ */ + int32_t CopyFrame(const VideoFrame& videoFrame); + /** + * Copy buffer: If newLength is bigger than allocated size, a new buffer of + * size length + * is allocated. + */ + int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer); + /** + * Delete VideoFrame and resets members to zero + */ + void Free(); + /** + * Set frame timestamp (90kHz) + */ + void SetTimeStamp(const uint32_t timeStamp) { _timeStamp = timeStamp; } + /** + * Get pointer to frame buffer + */ + uint8_t* Buffer() const { return _buffer; } - uint8_t*& Buffer() {return _buffer;} + uint8_t*& Buffer() { return _buffer; } - /** - * Get allocated buffer size - */ - uint32_t Size() const {return _bufferSize;} - /** - * Get frame length - */ - uint32_t Length() const {return _bufferLength;} - /** - * Get frame timestamp (90kHz) - */ - uint32_t TimeStamp() const {return _timeStamp;} - /** - * Get frame width - */ - uint32_t Width() const {return _width;} - /** - * Get frame height - */ - uint32_t Height() const {return _height;} - /** - * Set frame width - */ - void SetWidth(const uint32_t width) {_width = width;} - /** - * Set frame height - */ - void SetHeight(const uint32_t height) {_height = height;} - /** - * Set render time in miliseconds - */ - void SetRenderTime(const int64_t renderTimeMs) {_renderTimeMs = renderTimeMs;} - /** - * Get render time in miliseconds - */ - int64_t RenderTimeMs() const {return _renderTimeMs;} + /** + * Get allocated buffer size + */ + uint32_t Size() const { return _bufferSize; } + /** + * Get frame length + */ + uint32_t Length() const { return _bufferLength; } + /** + * Get frame timestamp (90kHz) + */ + uint32_t TimeStamp() const { return _timeStamp; } + /** + * Get frame width + */ + uint32_t Width() const { return _width; } + /** + * Get frame height + */ + uint32_t Height() const { return _height; } + /** + * Set frame width + */ + void SetWidth(const uint32_t width) { _width = width; } + /** + * Set frame height + */ + void SetHeight(const uint32_t height) { _height = height; } + /** + * Set render time in miliseconds + */ + void SetRenderTime(const int64_t renderTimeMs) { + _renderTimeMs = renderTimeMs; + } + /** + * Get render time in miliseconds + */ + int64_t RenderTimeMs() const { return _renderTimeMs; } -private: - void Set(uint8_t* buffer, - uint32_t size, - uint32_t length, - uint32_t timeStamp); + private: + void Set(uint8_t* buffer, uint32_t size, uint32_t length, uint32_t timeStamp); - uint8_t* _buffer; // Pointer to frame buffer - uint32_t _bufferSize; // Allocated buffer size - uint32_t _bufferLength; // Length (in bytes) of buffer - uint32_t _timeStamp; // Timestamp of frame (90kHz) - uint32_t _width; - uint32_t _height; - int64_t _renderTimeMs; -}; // end of VideoFrame class declaration + uint8_t* _buffer; // Pointer to frame buffer + uint32_t _bufferSize; // Allocated buffer size + uint32_t _bufferLength; // Length (in bytes) of buffer + uint32_t _timeStamp; // Timestamp of frame (90kHz) + uint32_t _width; + uint32_t _height; + int64_t _renderTimeMs; +}; // end of VideoFrame class declaration // inline implementation of VideoFrame class: -inline -VideoFrame::VideoFrame(): - _buffer(0), - _bufferSize(0), - _bufferLength(0), - _timeStamp(0), - _width(0), - _height(0), - _renderTimeMs(0) -{ - // +inline VideoFrame::VideoFrame() + : _buffer(0), + _bufferSize(0), + _bufferLength(0), + _timeStamp(0), + _width(0), + _height(0), + _renderTimeMs(0) { + // } -inline -VideoFrame::~VideoFrame() -{ - if(_buffer) - { - delete [] _buffer; - _buffer = NULL; +inline 
VideoFrame::~VideoFrame() { + if (_buffer) { + delete[] _buffer; + _buffer = NULL; + } +} + +inline int32_t VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) { + if (minimumSize < 1) { + return -1; + } + if (minimumSize > _bufferSize) { + // create buffer of sufficient size + uint8_t* newBufferBuffer = new uint8_t[minimumSize]; + if (_buffer) { + // copy old data + memcpy(newBufferBuffer, _buffer, _bufferSize); + delete[] _buffer; + } else { + memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t)); } + _buffer = newBufferBuffer; + _bufferSize = minimumSize; + } + return 0; } +inline int32_t VideoFrame::SetLength(const uint32_t newLength) { + if (newLength > _bufferSize) { // can't accomodate new value + return -1; + } + _bufferLength = newLength; + return 0; +} -inline -int32_t -VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) -{ - if (minimumSize < 1) - { - return -1; +inline int32_t VideoFrame::SwapFrame(VideoFrame& videoFrame) { + uint32_t tmpTimeStamp = _timeStamp; + uint32_t tmpWidth = _width; + uint32_t tmpHeight = _height; + int64_t tmpRenderTime = _renderTimeMs; + + _timeStamp = videoFrame._timeStamp; + _width = videoFrame._width; + _height = videoFrame._height; + _renderTimeMs = videoFrame._renderTimeMs; + + videoFrame._timeStamp = tmpTimeStamp; + videoFrame._width = tmpWidth; + videoFrame._height = tmpHeight; + videoFrame._renderTimeMs = tmpRenderTime; + + return Swap(videoFrame._buffer, videoFrame._bufferLength, + videoFrame._bufferSize); +} + +inline int32_t VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, + uint32_t& newSize) { + uint8_t* tmpBuffer = _buffer; + uint32_t tmpLength = _bufferLength; + uint32_t tmpSize = _bufferSize; + _buffer = newMemory; + _bufferLength = newLength; + _bufferSize = newSize; + newMemory = tmpBuffer; + newLength = tmpLength; + newSize = tmpSize; + return 0; +} + +inline int32_t VideoFrame::CopyFrame(uint32_t length, + const uint8_t* sourceBuffer) { + if (length > _bufferSize) { + int32_t ret = VerifyAndAllocate(length); + if (ret < 0) { + return ret; } - if(minimumSize > _bufferSize) - { - // create buffer of sufficient size - uint8_t* newBufferBuffer = new uint8_t[minimumSize]; - if(_buffer) - { - // copy old data - memcpy(newBufferBuffer, _buffer, _bufferSize); - delete [] _buffer; - } - else - { - memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t)); - } - _buffer = newBufferBuffer; - _bufferSize = minimumSize; - } - return 0; + } + memcpy(_buffer, sourceBuffer, length); + _bufferLength = length; + return 0; } -inline -int32_t -VideoFrame::SetLength(const uint32_t newLength) -{ - if (newLength >_bufferSize ) - { // can't accomodate new value - return -1; - } - _bufferLength = newLength; - return 0; +inline int32_t VideoFrame::CopyFrame(const VideoFrame& videoFrame) { + if (CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0) { + return -1; + } + _timeStamp = videoFrame._timeStamp; + _width = videoFrame._width; + _height = videoFrame._height; + _renderTimeMs = videoFrame._renderTimeMs; + return 0; } -inline -int32_t -VideoFrame::SwapFrame(VideoFrame& videoFrame) -{ - uint32_t tmpTimeStamp = _timeStamp; - uint32_t tmpWidth = _width; - uint32_t tmpHeight = _height; - int64_t tmpRenderTime = _renderTimeMs; +inline void VideoFrame::Free() { + _timeStamp = 0; + _bufferLength = 0; + _bufferSize = 0; + _height = 0; + _width = 0; + _renderTimeMs = 0; - _timeStamp = videoFrame._timeStamp; - _width = videoFrame._width; - _height = videoFrame._height; - _renderTimeMs = videoFrame._renderTimeMs; - - 
videoFrame._timeStamp = tmpTimeStamp; - videoFrame._width = tmpWidth; - videoFrame._height = tmpHeight; - videoFrame._renderTimeMs = tmpRenderTime; - - return Swap(videoFrame._buffer, videoFrame._bufferLength, videoFrame._bufferSize); + if (_buffer) { + delete[] _buffer; + _buffer = NULL; + } } -inline -int32_t -VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize) -{ - uint8_t* tmpBuffer = _buffer; - uint32_t tmpLength = _bufferLength; - uint32_t tmpSize = _bufferSize; - _buffer = newMemory; - _bufferLength = newLength; - _bufferSize = newSize; - newMemory = tmpBuffer; - newLength = tmpLength; - newSize = tmpSize; - return 0; -} - -inline -int32_t -VideoFrame::CopyFrame(uint32_t length, const uint8_t* sourceBuffer) -{ - if (length > _bufferSize) - { - int32_t ret = VerifyAndAllocate(length); - if (ret < 0) - { - return ret; - } - } - memcpy(_buffer, sourceBuffer, length); - _bufferLength = length; - return 0; -} - -inline -int32_t -VideoFrame::CopyFrame(const VideoFrame& videoFrame) -{ - if(CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0) - { - return -1; - } - _timeStamp = videoFrame._timeStamp; - _width = videoFrame._width; - _height = videoFrame._height; - _renderTimeMs = videoFrame._renderTimeMs; - return 0; -} - -inline -void -VideoFrame::Free() -{ - _timeStamp = 0; - _bufferLength = 0; - _bufferSize = 0; - _height = 0; - _width = 0; - _renderTimeMs = 0; - - if(_buffer) - { - delete [] _buffer; - _buffer = NULL; - } -} - - /* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It * allows for adding and subtracting frames while keeping track of the resulting * states. @@ -738,330 +673,236 @@ VideoFrame::Free() * - The +operator assume that you would never add exactly opposite frames when * deciding the resulting state. To do this use the -operator. */ -class AudioFrame -{ -public: - // Stereo, 32 kHz, 60 ms (2 * 32 * 60) - static const int kMaxDataSizeSamples = 3840; +class AudioFrame { + public: + // Stereo, 32 kHz, 60 ms (2 * 32 * 60) + static const int kMaxDataSizeSamples = 3840; - enum VADActivity - { - kVadActive = 0, - kVadPassive = 1, - kVadUnknown = 2 - }; - enum SpeechType - { - kNormalSpeech = 0, - kPLC = 1, - kCNG = 2, - kPLCCNG = 3, - kUndefined = 4 - }; + enum VADActivity { + kVadActive = 0, + kVadPassive = 1, + kVadUnknown = 2 + }; + enum SpeechType { + kNormalSpeech = 0, + kPLC = 1, + kCNG = 2, + kPLCCNG = 3, + kUndefined = 4 + }; - AudioFrame(); - virtual ~AudioFrame(); + AudioFrame(); + virtual ~AudioFrame() {} - void UpdateFrame( - int id, - uint32_t timestamp, - const int16_t* data, - int samples_per_channel, - int sample_rate_hz, - SpeechType speech_type, - VADActivity vad_activity, - int num_channels = 1, - uint32_t energy = -1); + // |interleaved_| is not changed by this method. 
+ void UpdateFrame(int id, uint32_t timestamp, const int16_t* data, + int samples_per_channel, int sample_rate_hz, + SpeechType speech_type, VADActivity vad_activity, + int num_channels = 1, uint32_t energy = -1); - AudioFrame& Append(const AudioFrame& rhs); + AudioFrame& Append(const AudioFrame& rhs); - void CopyFrom(const AudioFrame& src); + void CopyFrom(const AudioFrame& src); - void Mute(); + void Mute(); - AudioFrame& operator>>=(const int rhs); - AudioFrame& operator+=(const AudioFrame& rhs); - AudioFrame& operator-=(const AudioFrame& rhs); + AudioFrame& operator>>=(const int rhs); + AudioFrame& operator+=(const AudioFrame& rhs); + AudioFrame& operator-=(const AudioFrame& rhs); - int id_; - uint32_t timestamp_; - int16_t data_[kMaxDataSizeSamples]; - int samples_per_channel_; - int sample_rate_hz_; - int num_channels_; - SpeechType speech_type_; - VADActivity vad_activity_; - uint32_t energy_; + int id_; + uint32_t timestamp_; + int16_t data_[kMaxDataSizeSamples]; + int samples_per_channel_; + int sample_rate_hz_; + int num_channels_; + SpeechType speech_type_; + VADActivity vad_activity_; + uint32_t energy_; + bool interleaved_; -private: - DISALLOW_COPY_AND_ASSIGN(AudioFrame); + private: + DISALLOW_COPY_AND_ASSIGN(AudioFrame); }; -inline -AudioFrame::AudioFrame() - : - id_(-1), - timestamp_(0), - data_(), - samples_per_channel_(0), - sample_rate_hz_(0), - num_channels_(1), - speech_type_(kUndefined), - vad_activity_(kVadUnknown), - energy_(0xffffffff) -{ +inline AudioFrame::AudioFrame() + : id_(-1), + timestamp_(0), + data_(), + samples_per_channel_(0), + sample_rate_hz_(0), + num_channels_(1), + speech_type_(kUndefined), + vad_activity_(kVadUnknown), + energy_(0xffffffff), + interleaved_(true) {} + +inline void AudioFrame::UpdateFrame(int id, uint32_t timestamp, + const int16_t* data, + int samples_per_channel, int sample_rate_hz, + SpeechType speech_type, + VADActivity vad_activity, int num_channels, + uint32_t energy) { + id_ = id; + timestamp_ = timestamp; + samples_per_channel_ = samples_per_channel; + sample_rate_hz_ = sample_rate_hz; + speech_type_ = speech_type; + vad_activity_ = vad_activity; + num_channels_ = num_channels; + energy_ = energy; + + const int length = samples_per_channel * num_channels; + assert(length <= kMaxDataSizeSamples && length >= 0); + if (data != NULL) { + memcpy(data_, data, sizeof(int16_t) * length); + } else { + memset(data_, 0, sizeof(int16_t) * length); + } } -inline -AudioFrame::~AudioFrame() -{ +inline void AudioFrame::CopyFrom(const AudioFrame& src) { + if (this == &src) return; + + id_ = src.id_; + timestamp_ = src.timestamp_; + samples_per_channel_ = src.samples_per_channel_; + sample_rate_hz_ = src.sample_rate_hz_; + speech_type_ = src.speech_type_; + vad_activity_ = src.vad_activity_; + num_channels_ = src.num_channels_; + energy_ = src.energy_; + interleaved_ = src.interleaved_; + + const int length = samples_per_channel_ * num_channels_; + assert(length <= kMaxDataSizeSamples && length >= 0); + memcpy(data_, src.data_, sizeof(int16_t) * length); } -inline -void -AudioFrame::UpdateFrame( - int id, - uint32_t timestamp, - const int16_t* data, - int samples_per_channel, - int sample_rate_hz, - SpeechType speech_type, - VADActivity vad_activity, - int num_channels, - uint32_t energy) -{ - id_ = id; - timestamp_ = timestamp; - samples_per_channel_ = samples_per_channel; - sample_rate_hz_ = sample_rate_hz; - speech_type_ = speech_type; - vad_activity_ = vad_activity; - num_channels_ = num_channels; - energy_ = energy; - - const int length = 
samples_per_channel * num_channels;
-    assert(length <= kMaxDataSizeSamples && length >= 0);
-    if(data != NULL)
-    {
-        memcpy(data_, data, sizeof(int16_t) * length);
-    }
-    else
-    {
-        memset(data_, 0, sizeof(int16_t) * length);
-    }
-}
-
-inline void AudioFrame::CopyFrom(const AudioFrame& src)
-{
-    if(this == &src)
-    {
-        return;
-    }
-    id_ = src.id_;
-    timestamp_ = src.timestamp_;
-    samples_per_channel_ = src.samples_per_channel_;
-    sample_rate_hz_ = src.sample_rate_hz_;
-    speech_type_ = src.speech_type_;
-    vad_activity_ = src.vad_activity_;
-    num_channels_ = src.num_channels_;
-    energy_ = src.energy_;
-
-    const int length = samples_per_channel_ * num_channels_;
-    assert(length <= kMaxDataSizeSamples && length >= 0);
-    memcpy(data_, src.data_, sizeof(int16_t) * length);
-}
-
-inline
-void
-AudioFrame::Mute()
-{
+inline void AudioFrame::Mute() {
   memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
 }
 
-inline
-AudioFrame&
-AudioFrame::operator>>=(const int rhs)
-{
-    assert((num_channels_ > 0) && (num_channels_ < 3));
-    if((num_channels_ > 2) ||
-        (num_channels_ < 1))
-    {
-        return *this;
-    }
-    for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
-    {
-        data_[i] = static_cast<int16_t>(data_[i] >> rhs);
-    }
-    return *this;
+inline AudioFrame& AudioFrame::operator>>=(const int rhs) {
+  assert((num_channels_ > 0) && (num_channels_ < 3));
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+
+  for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+    data_[i] = static_cast<int16_t>(data_[i] >> rhs);
+  }
+  return *this;
 }
 
-inline
-AudioFrame&
-AudioFrame::Append(const AudioFrame& rhs)
-{
-    // Sanity check
-    assert((num_channels_ > 0) && (num_channels_ < 3));
-    if((num_channels_ > 2) ||
-        (num_channels_ < 1))
-    {
-        return *this;
-    }
-    if(num_channels_ != rhs.num_channels_)
-    {
-        return *this;
-    }
-    if((vad_activity_ == kVadActive) ||
-        rhs.vad_activity_ == kVadActive)
-    {
-        vad_activity_ = kVadActive;
-    }
-    else if((vad_activity_ == kVadUnknown) ||
-        rhs.vad_activity_ == kVadUnknown)
-    {
-        vad_activity_ = kVadUnknown;
-    }
-    if(speech_type_ != rhs.speech_type_)
-    {
-        speech_type_ = kUndefined;
-    }
-
-    int offset = samples_per_channel_ * num_channels_;
-    for(int i = 0;
-        i < rhs.samples_per_channel_ * rhs.num_channels_;
-        i++)
-    {
-        data_[offset+i] = rhs.data_[i];
-    }
-    samples_per_channel_ += rhs.samples_per_channel_;
-    return *this;
+inline AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
+  // Sanity check
+  assert((num_channels_ > 0) && (num_channels_ < 3));
+  assert(interleaved_ == rhs.interleaved_);
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+  if (num_channels_ != rhs.num_channels_) return *this;
+
+  if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+    vad_activity_ = kVadActive;
+  } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+    vad_activity_ = kVadUnknown;
+  }
+  if (speech_type_ != rhs.speech_type_) {
+    speech_type_ = kUndefined;
+  }
+
+  int offset = samples_per_channel_ * num_channels_;
+  for (int i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
+    data_[offset + i] = rhs.data_[i];
+  }
+  samples_per_channel_ += rhs.samples_per_channel_;
+  return *this;
 }
 
-// merge vectors
-inline
-AudioFrame&
-AudioFrame::operator+=(const AudioFrame& rhs)
-{
-    // Sanity check
-    assert((num_channels_ > 0) && (num_channels_ < 3));
-    if((num_channels_ > 2) ||
-        (num_channels_ < 1))
-    {
-        return *this;
-    }
-    if(num_channels_ != rhs.num_channels_)
-    {
-        return *this;
-    }
-    bool noPrevData = false;
-    if(samples_per_channel_ != rhs.samples_per_channel_)
-    {
-        if(samples_per_channel_ == 0)
-        {
-            // special case we have no data to start with
-            samples_per_channel_ = rhs.samples_per_channel_;
-            noPrevData = true;
-        } else
-        {
-            return *this;
-        }
-    }
-
-    if((vad_activity_ == kVadActive) ||
-        rhs.vad_activity_ == kVadActive)
-    {
-        vad_activity_ = kVadActive;
-    }
-    else if((vad_activity_ == kVadUnknown) ||
-        rhs.vad_activity_ == kVadUnknown)
-    {
-        vad_activity_ = kVadUnknown;
-    }
-
-    if(speech_type_ != rhs.speech_type_)
-    {
-        speech_type_ = kUndefined;
-    }
-
-    if(noPrevData)
-    {
-        memcpy(data_, rhs.data_,
-               sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
-    } else
-    {
-        // IMPROVEMENT this can be done very fast in assembly
-        for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
-        {
-            int32_t wrapGuard = static_cast<int32_t>(data_[i]) +
-                static_cast<int32_t>(rhs.data_[i]);
-            if(wrapGuard < -32768)
-            {
-                data_[i] = -32768;
-            }else if(wrapGuard > 32767)
-            {
-                data_[i] = 32767;
-            }else
-            {
-                data_[i] = (int16_t)wrapGuard;
-            }
-        }
-    }
-    energy_ = 0xffffffff;
-    return *this;
-}
+inline AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
+  // Sanity check
+  assert((num_channels_ > 0) && (num_channels_ < 3));
+  assert(interleaved_ == rhs.interleaved_);
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+  if (num_channels_ != rhs.num_channels_) return *this;
+
+  bool noPrevData = false;
+  if (samples_per_channel_ != rhs.samples_per_channel_) {
+    if (samples_per_channel_ == 0) {
+      // special case we have no data to start with
+      samples_per_channel_ = rhs.samples_per_channel_;
+      noPrevData = true;
+    } else {
+      return *this;
+    }
+  }
+
+  if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+    vad_activity_ = kVadActive;
+  } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+    vad_activity_ = kVadUnknown;
+  }
+
+  if (speech_type_ != rhs.speech_type_) speech_type_ = kUndefined;
+
+  if (noPrevData) {
+    memcpy(data_, rhs.data_,
+           sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
+  } else {
+    // IMPROVEMENT this can be done very fast in assembly
+    for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+      int32_t wrapGuard =
+          static_cast<int32_t>(data_[i]) + static_cast<int32_t>(rhs.data_[i]);
+      if (wrapGuard < -32768) {
+        data_[i] = -32768;
+      } else if (wrapGuard > 32767) {
+        data_[i] = 32767;
+      } else {
+        data_[i] = (int16_t)wrapGuard;
+      }
+    }
+  }
+  energy_ = 0xffffffff;
+  return *this;
+}
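The reformatted operator+= keeps the original mixing semantics: corresponding samples are summed in 32-bit arithmetic and clamped to the int16_t range instead of being allowed to wrap, and the cached energy_ is invalidated (set to 0xffffffff) rather than recomputed. A standalone sketch of that clamping step — using plain samples instead of the real AudioFrame so it compiles in isolation:

    #include <assert.h>
    #include <stdint.h>

    // Mirrors the wrapGuard clamping in AudioFrame::operator+= above.
    static int16_t MixSamples(int16_t a, int16_t b) {
      int32_t wrap_guard = static_cast<int32_t>(a) + static_cast<int32_t>(b);
      if (wrap_guard < -32768) return -32768;  // saturate, don't wrap
      if (wrap_guard > 32767) return 32767;
      return static_cast<int16_t>(wrap_guard);
    }

    int main() {
      assert(MixSamples(30000, 10000) == 32767);    // 40000 would wrap to -25536
      assert(MixSamples(-30000, -10000) == -32768); // symmetric negative clamp
      assert(MixSamples(1000, -250) == 750);        // in-range sums are exact
      return 0;
    }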
-inline
-AudioFrame&
-AudioFrame::operator-=(const AudioFrame& rhs)
-{
-    // Sanity check
-    assert((num_channels_ > 0) && (num_channels_ < 3));
-    if((num_channels_ > 2)||
-        (num_channels_ < 1))
-    {
-        return *this;
-    }
-    if((samples_per_channel_ != rhs.samples_per_channel_) ||
-        (num_channels_ != rhs.num_channels_))
-    {
-        return *this;
-    }
-    if((vad_activity_ != kVadPassive) ||
-        rhs.vad_activity_ != kVadPassive)
-    {
-        vad_activity_ = kVadUnknown;
-    }
-    speech_type_ = kUndefined;
-
-    for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
-    {
-        int32_t wrapGuard = static_cast<int32_t>(data_[i]) -
-            static_cast<int32_t>(rhs.data_[i]);
-        if(wrapGuard < -32768)
-        {
-            data_[i] = -32768;
-        }
-        else if(wrapGuard > 32767)
-        {
-            data_[i] = 32767;
-        }
-        else
-        {
-            data_[i] = (int16_t)wrapGuard;
-        }
-    }
-    energy_ = 0xffffffff;
-    return *this;
-}
+inline AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
+  // Sanity check
+  assert((num_channels_ > 0) && (num_channels_ < 3));
+  assert(interleaved_ == rhs.interleaved_);
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+
+  if ((samples_per_channel_ != rhs.samples_per_channel_) ||
+      (num_channels_ != rhs.num_channels_)) {
+    return *this;
+  }
+  if ((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
+    vad_activity_ = kVadUnknown;
+  }
+  speech_type_ = kUndefined;
+
+  for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+    int32_t wrapGuard =
+        static_cast<int32_t>(data_[i]) - static_cast<int32_t>(rhs.data_[i]);
+    if (wrapGuard < -32768) {
+      data_[i] = -32768;
+    } else if (wrapGuard > 32767) {
+      data_[i] = 32767;
+    } else {
+      data_[i] = (int16_t)wrapGuard;
+    }
+  }
+  energy_ = 0xffffffff;
+  return *this;
+}
 
 inline bool IsNewerSequenceNumber(uint16_t sequence_number,
                                   uint16_t prev_sequence_number) {
   return sequence_number != prev_sequence_number &&
-      static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
+         static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
 }
 
 inline bool IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
   return timestamp != prev_timestamp &&
-      static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
+         static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
 }
 
 inline bool IsNewerOrSameTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
@@ -1071,15 +912,15 @@ inline bool IsNewerOrSameTimestamp(uint32_t timestamp, uint32_t prev_timestamp)
 
 inline uint16_t LatestSequenceNumber(uint16_t sequence_number1,
                                      uint16_t sequence_number2) {
-  return IsNewerSequenceNumber(sequence_number1, sequence_number2) ?
-      sequence_number1 : sequence_number2;
+  return IsNewerSequenceNumber(sequence_number1, sequence_number2)
+             ? sequence_number1
+             : sequence_number2;
 }
 
 inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2) {
-  return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 :
-      timestamp2;
+  return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
 }
 
 }  // namespace webrtc
 
-#endif // MODULE_COMMON_TYPES_H
+#endif  // MODULE_COMMON_TYPES_H
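IsNewerSequenceNumber above treats the 16-bit sequence space as circular: after the unsigned subtraction, any difference below 0x8000 (half the space) counts as newer, so ordering survives the wrap from 65535 back to 0. A standalone sketch spelling out the boundary cases — the IsNewer helper is a local copy for illustration, not part of the patch:

    #include <assert.h>
    #include <stdint.h>

    // Same logic as IsNewerSequenceNumber in module_common_types.h above.
    static bool IsNewer(uint16_t seq, uint16_t prev) {
      return seq != prev && static_cast<uint16_t>(seq - prev) < 0x8000;
    }

    int main() {
      assert(IsNewer(2, 1));      // ordinary increment
      assert(IsNewer(0, 65535));  // 0 - 65535 wraps to 1: newer across the wrap
      assert(!IsNewer(65535, 0)); // the reverse correctly reads as older
      assert(!IsNewer(5, 5));     // equal is never newer
      return 0;
    }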
diff --git a/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.cc b/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.cc
index 92c51acce602..19baaa3b2183
--- a/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.cc
+++ b/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.cc
@@ -23,7 +23,6 @@
 
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/file_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
 // http://msdn2.microsoft.com/en-us/library/ms779636.aspx
@@ -178,8 +177,7 @@ AviFile::AviFile()
       _videoCodecConfigParamsLength(0),
       _videoStreamDataChunkPrefix(0),
       _audioStreamDataChunkPrefix(0),
-      _created(false),
-      _indexList(new ListWrapper())
+      _created(false)
 {
     ResetComplexMembers();
 }
@@ -188,7 +186,6 @@ AviFile::~AviFile()
 {
     Close();
 
-    delete _indexList;
     delete[] _videoCodecConfigParams;
 
     delete _crit;
 }
@@ -1712,21 +1709,11 @@ uint32_t AviFile::StreamAndTwoCharCodeToTag(int32_t streamNum,
 
 void AviFile::ClearIndexList()
 {
-    while (!_indexList->Empty())
-    {
-        ListItem* listItem = _indexList->First();
-        if (listItem == 0)
-        {
-            break;
-        }
-
-        AVIINDEXENTRY* item = static_cast<AVIINDEXENTRY*>(listItem->GetItem());
-        if (item != NULL)
-        {
-            delete item;
-        }
-        _indexList->PopFront();
-    }
+    for (IndexList::iterator iter = _indexList.begin();
+         iter != _indexList.end(); ++iter) {
+      delete *iter;
+    }
+    _indexList.clear();
 }
 
 void AviFile::AddChunkToIndexList(uint32_t inChunkId,
                                   uint32_t inFlags,
                                   uint32_t inOffset,
                                   uint32_t inSize)
 {
-    _indexList->PushBack(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
+    _indexList.push_back(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
                                            inSize));
 }
 
@@ -1747,19 +1734,13 @@ void AviFile::WriteIndex()
     _bytesWritten += PutLE32(0);
     const size_t idxChunkSize = _bytesWritten;
 
-    for (ListItem* listItem = _indexList->First();
-         listItem != NULL;
-         listItem = _indexList->Next(listItem))
-    {
-        const AVIINDEXENTRY* item =
-            static_cast<const AVIINDEXENTRY*>(listItem->GetItem());
-        if (item != NULL)
-        {
-            _bytesWritten += PutLE32(item->ckid);
-            _bytesWritten += PutLE32(item->dwFlags);
-            _bytesWritten += PutLE32(item->dwChunkOffset);
-            _bytesWritten += PutLE32(item->dwChunkLength);
-        }
+    for (IndexList::iterator iter = _indexList.begin();
+         iter != _indexList.end(); ++iter) {
+      const AVIINDEXENTRY* item = *iter;
+      _bytesWritten += PutLE32(item->ckid);
+      _bytesWritten += PutLE32(item->dwFlags);
+      _bytesWritten += PutLE32(item->dwChunkOffset);
+      _bytesWritten += PutLE32(item->dwChunkLength);
     }
     PutLE32LengthFromCurrent(static_cast<int32_t>(idxChunkSize));
 }
diff --git a/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.h b/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.h
index a55fc187481d..d8b10626dff0
--- a/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.h
+++ b/media/webrtc/trunk/webrtc/modules/media_file/source/avi_file.h
@@ -14,12 +14,12 @@
 #define WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
 
 #include <stdio.h>
+#include <list>
 
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
 class CriticalSectionWrapper;
-class ListWrapper;
 
 struct AVISTREAMHEADER
 {
@@ -194,6 +194,7 @@ private:
     void WriteIndex();
 
 private:
+    typedef std::list<AVIINDEXENTRY*> IndexList;
     struct AVIMAINHEADER
     {
         AVIMAINHEADER();
@@ -269,7 +270,7 @@ private:
     uint32_t _audioStreamDataChunkPrefix;
     bool _created;
 
-    ListWrapper* _indexList; // Elements are of type AVIINDEXENTRY.
+    IndexList _indexList;
 };
 } // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/media_file/source/media_file.gypi b/media/webrtc/trunk/webrtc/modules/media_file/source/media_file.gypi
index 0d1b15fb64da..37df13c5ac32
--- a/media/webrtc/trunk/webrtc/modules/media_file/source/media_file.gypi
+++ b/media/webrtc/trunk/webrtc/modules/media_file/source/media_file.gypi
@@ -14,9 +14,6 @@
       'dependencies': [
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
-      'defines': [
-        'WEBRTC_MODULE_UTILITY_VIDEO', # for compiling support for video recording
-      ],
       'include_dirs': [
         '../interface',
         '../../interface',
diff --git a/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc b/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc
index 8971954763d6..8dfddf77ba41
--- a/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc
+++ b/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc
@@ -8,6 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
*/ +#include "webrtc/modules/media_file/source/media_file_utility.h" + #include #include #include @@ -15,7 +17,6 @@ #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" #include "webrtc/modules/interface/module_common_types.h" -#include "webrtc/modules/media_file/source/media_file_utility.h" #include "webrtc/system_wrappers/interface/file_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" diff --git a/media/webrtc/trunk/webrtc/modules/modules.gyp b/media/webrtc/trunk/webrtc/modules/modules.gyp index 2f84cd60aa61..a2887d068b9c 100644 --- a/media/webrtc/trunk/webrtc/modules/modules.gyp +++ b/media/webrtc/trunk/webrtc/modules/modules.gyp @@ -74,6 +74,9 @@ { 'target_name': 'modules_unittests', 'type': '<(gtest_target_type)', + 'defines': [ + '<@(audio_coding_defines)', + ], 'dependencies': [ 'audio_coding_module', 'audio_processing', @@ -100,15 +103,19 @@ '<(rbe_components_path)/remote_bitrate_estimator_components.gyp:rbe_components', '<(DEPTH)/testing/gmock.gyp:gmock', '<(DEPTH)/testing/gtest.gyp:gtest', + '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', '<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(webrtc_root)/test/test.gyp:test_support_main', - '<(webrtc_root)/common_video/common_video.gyp:frame_generator', + '<(webrtc_root)/test/test.gyp:frame_generator', ], 'sources': [ + 'audio_coding/main/acm2/acm_receiver_unittest.cc', + 'audio_coding/main/acm2/call_statistics_unittest.cc', + 'audio_coding/main/acm2/initial_delay_manager_unittest.cc', + 'audio_coding/main/acm2/nack_unittest.cc', 'audio_coding/main/source/acm_neteq_unittest.cc', - 'audio_coding/main/source/nack_unittest.cc', 'audio_coding/codecs/cng/cng_unittest.cc', 'audio_coding/codecs/isac/fix/source/filters_unittest.cc', 'audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc', @@ -155,32 +162,50 @@ 'audio_coding/neteq4/mock/mock_payload_splitter.h', 'audio_processing/aec/system_delay_unittest.cc', 'audio_processing/aec/echo_cancellation_unittest.cc', + 'audio_processing/audio_processing_impl_unittest.cc', 'audio_processing/echo_cancellation_impl_unittest.cc', 'audio_processing/test/audio_processing_unittest.cc', + 'audio_processing/test/test_utils.h', 'audio_processing/utility/delay_estimator_unittest.cc', 'audio_processing/utility/ring_buffer_unittest.cc', 'bitrate_controller/bitrate_controller_unittest.cc', + 'desktop_capture/desktop_and_cursor_composer_unittest.cc', 'desktop_capture/desktop_region_unittest.cc', 'desktop_capture/differ_block_unittest.cc', 'desktop_capture/differ_unittest.cc', + 'desktop_capture/mouse_cursor_monitor_unittest.cc', 'desktop_capture/screen_capturer_helper_unittest.cc', 'desktop_capture/screen_capturer_mac_unittest.cc', 'desktop_capture/screen_capturer_mock_objects.h', 'desktop_capture/screen_capturer_unittest.cc', 'desktop_capture/window_capturer_unittest.cc', - "desktop_capture/win/cursor_unittest.cc", - "desktop_capture/win/cursor_unittest_resources.h", - "desktop_capture/win/cursor_unittest_resources.rc", + 'desktop_capture/win/cursor_unittest.cc', + 'desktop_capture/win/cursor_unittest_resources.h', + 'desktop_capture/win/cursor_unittest_resources.rc', 'media_file/source/media_file_unittest.cc', 'module_common_types_unittest.cc', 'pacing/paced_sender_unittest.cc', + 'remote_bitrate_estimator/bwe_simulations.cc', 'remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h', - 
'remote_bitrate_estimator/bitrate_estimator_unittest.cc', + 'remote_bitrate_estimator/rate_statistics_unittest.cc', 'remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc', 'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc', 'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h', + 'remote_bitrate_estimator/remote_bitrate_estimators_test.cc', 'remote_bitrate_estimator/rtp_to_ntp_unittest.cc', + 'remote_bitrate_estimator/test/bwe_test_baselinefile.cc', + 'remote_bitrate_estimator/test/bwe_test_baselinefile.h', + 'remote_bitrate_estimator/test/bwe_test_fileutils.cc', + 'remote_bitrate_estimator/test/bwe_test_fileutils.h', + 'remote_bitrate_estimator/test/bwe_test_framework.cc', + 'remote_bitrate_estimator/test/bwe_test_framework.h', + 'remote_bitrate_estimator/test/bwe_test_framework_unittest.cc', + 'remote_bitrate_estimator/test/bwe_test_logging.cc', + 'remote_bitrate_estimator/test/bwe_test_logging.h', + 'remote_bitrate_estimator/test/bwe_test.cc', + 'remote_bitrate_estimator/test/bwe_test.h', 'rtp_rtcp/source/mock/mock_rtp_payload_strategy.h', + 'rtp_rtcp/source/byte_io_unittest.cc', 'rtp_rtcp/source/fec_receiver_unittest.cc', 'rtp_rtcp/source/fec_test_helper.cc', 'rtp_rtcp/source/fec_test_helper.h', @@ -196,6 +221,7 @@ 'rtp_rtcp/source/rtp_format_vp8_test_helper.h', 'rtp_rtcp/source/rtp_packet_history_unittest.cc', 'rtp_rtcp/source/rtp_payload_registry_unittest.cc', + 'rtp_rtcp/source/rtp_rtcp_impl_unittest.cc', 'rtp_rtcp/source/rtp_utility_unittest.cc', 'rtp_rtcp/source/rtp_header_extension_unittest.cc', 'rtp_rtcp/source/rtp_sender_unittest.cc', @@ -214,6 +240,7 @@ 'video_coding/main/interface/mock/mock_vcm_callbacks.h', 'video_coding/main/source/decoding_state_unittest.cc', 'video_coding/main/source/jitter_buffer_unittest.cc', + 'video_coding/main/source/media_optimization_unittest.cc', 'video_coding/main/source/receiver_unittest.cc', 'video_coding/main/source/session_info_unittest.cc', 'video_coding/main/source/timing_unittest.cc', @@ -236,10 +263,20 @@ 'video_processing/main/test/unit_test/video_processing_unittest.h', ], 'conditions': [ + ['enable_bwe_test_logging==1', { + 'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1' ], + }, { + 'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0' ], + 'sources!': [ + 'remote_bitrate_estimator/test/bwe_test_logging.cc' + ], + }], # Run screen/window capturer tests only on platforms where they are # supported. 
['desktop_capture_supported==0', { 'sources!': [ + 'desktop_capture/desktop_and_cursor_composer_unittest.cc', + 'desktop_capture/mouse_cursor_monitor_unittest.cc', 'desktop_capture/screen_capturer_helper_unittest.cc', 'desktop_capture/screen_capturer_mac_unittest.cc', 'desktop_capture/screen_capturer_mock_objects.h', @@ -357,10 +394,10 @@ 'target_name': 'modules_tests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'modules_tests', ], 'includes': [ + '../build/isolate.gypi', 'modules_tests.isolate', ], 'sources': [ @@ -371,10 +408,10 @@ 'target_name': 'modules_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'modules_unittests', ], 'includes': [ + '../build/isolate.gypi', 'modules_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/modules_tests.isolate b/media/webrtc/trunk/webrtc/modules/modules_tests.isolate index a473720e2917..7a051f66eb69 100644 --- a/media/webrtc/trunk/webrtc/modules/modules_tests.isolate +++ b/media/webrtc/trunk/webrtc/modules/modules_tests.isolate @@ -21,7 +21,6 @@ 'variables': { 'command': [ '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/modules_tests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ @@ -31,11 +30,11 @@ '../../resources/foreman_cif.yuv', '../../resources/paris_qcif.yuv', '../../testing/test_env.py', - '../../tools/swarm_client/run_isolated.py', - '../../tools/swarm_client/googletest/run_test_cases.py', - '../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/modules_tests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/modules/modules_unittests.isolate b/media/webrtc/trunk/webrtc/modules/modules_unittests.isolate index 43e659ce0967..2257180e4132 100644 --- a/media/webrtc/trunk/webrtc/modules/modules_unittests.isolate +++ b/media/webrtc/trunk/webrtc/modules/modules_unittests.isolate @@ -21,12 +21,17 @@ 'variables': { 'command': [ '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/modules_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ - '../../data/voice_engine/audio_tiny48.wav', '../../DEPS', + '../../data/audio_processing/output_data_float.pb', + '../../data/voice_engine/audio_tiny48.wav', + '../../resources/att-downlink.rx', + '../../resources/att-uplink.rx', + '../../resources/audio_coding/neteq4_network_stats.dat', + '../../resources/audio_coding/neteq4_rtcp_stats.dat', + '../../resources/audio_coding/neteq4_universal_ref.pcm', '../../resources/audio_coding/neteq_network_stats.dat', '../../resources/audio_coding/neteq_rtcp_stats.dat', '../../resources/audio_coding/neteq_universal_new.rtp', @@ -42,17 +47,50 @@ '../../resources/near16_stereo.pcm', '../../resources/near32_stereo.pcm', '../../resources/near8_stereo.pcm', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin', + 
'../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin', + '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin', + '../../resources/sprint-downlink.rx', + '../../resources/sprint-uplink.rx', + '../../resources/synthetic-trace.rx', + '../../resources/tmobile-downlink.rx', + '../../resources/tmobile-uplink.rx', + '../../resources/verizon3g-downlink.rx', + '../../resources/verizon3g-uplink.rx', + '../../resources/verizon4g-downlink.rx', + '../../resources/verizon4g-uplink.rx', '../../resources/video_coding/frame-ethernet-ii.pcap', '../../resources/video_coding/frame-loopback.pcap', '../../resources/video_coding/pltype103.rtp', '../../resources/video_coding/ssrcs-2.pcap', '../../resources/video_coding/ssrcs-3.pcap', '../../testing/test_env.py', - '../../tools/swarm_client/run_isolated.py', - '../../tools/swarm_client/googletest/run_test_cases.py', - '../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/modules_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/modules/pacing/include/mock/mock_paced_sender.h b/media/webrtc/trunk/webrtc/modules/pacing/include/mock/mock_paced_sender.h index c8f2a180f80f..3841ef375330 100644 --- a/media/webrtc/trunk/webrtc/modules/pacing/include/mock/mock_paced_sender.h +++ b/media/webrtc/trunk/webrtc/modules/pacing/include/mock/mock_paced_sender.h @@ -22,11 +22,12 @@ namespace webrtc { class MockPacedSender : public PacedSender { public: MockPacedSender() : PacedSender(NULL, 0, 0) {} - MOCK_METHOD5(SendPacket, bool(Priority priority, + MOCK_METHOD6(SendPacket, bool(Priority priority, uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms, - int bytes)); + int bytes, + bool retransmission)); MOCK_CONST_METHOD0(QueueInMs, int()); MOCK_CONST_METHOD0(QueueInPackets, int()); }; diff --git 
a/media/webrtc/trunk/webrtc/modules/pacing/include/paced_sender.h b/media/webrtc/trunk/webrtc/modules/pacing/include/paced_sender.h index 3bd9896719d5..045469009f02 100644 --- a/media/webrtc/trunk/webrtc/modules/pacing/include/paced_sender.h +++ b/media/webrtc/trunk/webrtc/modules/pacing/include/paced_sender.h @@ -43,13 +43,18 @@ class PacedSender : public Module { // module again. // Called when it's time to send a queued packet. // Returns false if packet cannot be sent. - virtual bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number, - int64_t capture_time_ms) = 0; + virtual bool TimeToSendPacket(uint32_t ssrc, + uint16_t sequence_number, + int64_t capture_time_ms, + bool retransmission) = 0; // Called when it's a good time to send padding data. virtual int TimeToSendPadding(int bytes) = 0; protected: virtual ~Callback() {} }; + + static const int kDefaultMaxQueueLengthMs = 2000; + PacedSender(Callback* callback, int target_bitrate_kbps, float pace_multiplier); @@ -80,9 +85,14 @@ class PacedSender : public Module { uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms, - int bytes); + int bytes, + bool retransmission); - // Returns the time since the oldest queued packet was captured. + // Sets the max length of the pacer queue in milliseconds. + // A negative queue size is interpreted as infinite. + virtual void set_max_queue_length_ms(int max_queue_length_ms); + + // Returns the time since the oldest queued packet was enqueued. virtual int QueueInMs() const; // Returns the number of milliseconds until the module wants a worker thread @@ -98,8 +108,9 @@ class PacedSender : public Module { bool ShouldSendNextPacket(paced_sender::PacketList** packet_list); // Local helper function to GetNextPacket. - void GetNextPacketFromList(paced_sender::PacketList* packets, - uint32_t* ssrc, uint16_t* sequence_number, int64_t* capture_time_ms); + paced_sender::Packet GetNextPacketFromList(paced_sender::PacketList* packets); + + bool SendPacketFromList(paced_sender::PacketList* packet_list); // Updates the number of bytes that can be sent for the next time interval. void UpdateBytesPerInterval(uint32_t delta_time_in_ms); @@ -111,6 +122,7 @@ class PacedSender : public Module { const float pace_multiplier_; bool enabled_; bool paused_; + int max_queue_length_ms_; scoped_ptr<CriticalSectionWrapper> critsect_; // This is the media budget, keeping track of how many bits of media // we can pace out during the current interval. diff --git a/media/webrtc/trunk/webrtc/modules/pacing/paced_sender.cc b/media/webrtc/trunk/webrtc/modules/pacing/paced_sender.cc index 49e8ef829bdb..e47614c89c38 100644 --- a/media/webrtc/trunk/webrtc/modules/pacing/paced_sender.cc +++ b/media/webrtc/trunk/webrtc/modules/pacing/paced_sender.cc @@ -36,16 +36,20 @@ namespace webrtc { namespace paced_sender { struct Packet { Packet(uint32_t ssrc, uint16_t seq_number, int64_t capture_time_ms, - int length_in_bytes) + int64_t enqueue_time_ms, int length_in_bytes, bool retransmission) : ssrc_(ssrc), sequence_number_(seq_number), capture_time_ms_(capture_time_ms), - bytes_(length_in_bytes) { + enqueue_time_ms_(enqueue_time_ms), + bytes_(length_in_bytes), + retransmission_(retransmission) { } uint32_t ssrc_; uint16_t sequence_number_; int64_t capture_time_ms_; + int64_t enqueue_time_ms_; int bytes_; + bool retransmission_; }; // STL list style class which prevents duplicates in the list.
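Two behavioral changes ride on the hunks above: SendPacket()/TimeToSendPacket() gain a retransmission flag, and QueueInMs() now measures time since enqueue rather than since capture. As a minimal sketch of what a callback implementation looks like under the new signature — the class name and logging are invented; only the two method signatures come from the header above:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical implementation of the Callback interface declared above;
    // in-tree code would derive from webrtc::PacedSender::Callback.
    class LoggingPacketSender {
     public:
      // The pacer now reports whether the queued packet is a retransmission,
      // so the RTP sender can take the right path when it finally sends it.
      bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
                            int64_t capture_time_ms, bool retransmission) {
        std::printf("ssrc=%u seq=%u capture_ms=%lld rtx=%d\n",
                    static_cast<unsigned>(ssrc),
                    static_cast<unsigned>(sequence_number),
                    static_cast<long long>(capture_time_ms),
                    static_cast<int>(retransmission));
        return true;  // Returning false leaves the packet queued in the pacer.
      }

      int TimeToSendPadding(int bytes) { return bytes; }
    };

A caller wanting the new bounded-queue behavior would presumably also call set_max_queue_length_ms() with kDefaultMaxQueueLengthMs or a custom cap; per the header comment above, a negative value disables the bound.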
@@ -105,7 +109,7 @@ class IntervalBudget { void UseBudget(int bytes) { bytes_remaining_ = std::max(bytes_remaining_ - bytes, - -100 * target_rate_kbps_ / 8); + -500 * target_rate_kbps_ / 8); } int bytes_remaining() const { return bytes_remaining_; } @@ -122,6 +126,7 @@ PacedSender::PacedSender(Callback* callback, int target_bitrate_kbps, pace_multiplier_(pace_multiplier), enabled_(false), paused_(false), + max_queue_length_ms_(kDefaultMaxQueueLengthMs), critsect_(CriticalSectionWrapper::CreateCriticalSection()), media_budget_(new paced_sender::IntervalBudget( pace_multiplier_ * target_bitrate_kbps)), @@ -170,7 +175,8 @@ void PacedSender::UpdateBitrate(int target_bitrate_kbps, } bool PacedSender::SendPacket(Priority priority, uint32_t ssrc, - uint16_t sequence_number, int64_t capture_time_ms, int bytes) { + uint16_t sequence_number, int64_t capture_time_ms, int bytes, + bool retransmission) { CriticalSectionScoped cs(critsect_.get()); if (!enabled_) { @@ -197,31 +203,40 @@ bool PacedSender::SendPacket(Priority priority, uint32_t ssrc, packet_list = low_priority_packets_.get(); break; } - packet_list->push_back(paced_sender::Packet(ssrc, sequence_number, - capture_time_ms, bytes)); + packet_list->push_back(paced_sender::Packet(ssrc, + sequence_number, + capture_time_ms, + TickTime::MillisecondTimestamp(), + bytes, + retransmission)); return false; } +void PacedSender::set_max_queue_length_ms(int max_queue_length_ms) { + CriticalSectionScoped cs(critsect_.get()); + max_queue_length_ms_ = max_queue_length_ms; +} + int PacedSender::QueueInMs() const { CriticalSectionScoped cs(critsect_.get()); int64_t now_ms = TickTime::MillisecondTimestamp(); - int64_t oldest_packet_capture_time = now_ms; + int64_t oldest_packet_enqueue_time = now_ms; if (!high_priority_packets_->empty()) { - oldest_packet_capture_time = std::min( - oldest_packet_capture_time, - high_priority_packets_->front().capture_time_ms_); + oldest_packet_enqueue_time = std::min( + oldest_packet_enqueue_time, + high_priority_packets_->front().enqueue_time_ms_); } if (!normal_priority_packets_->empty()) { - oldest_packet_capture_time = std::min( - oldest_packet_capture_time, - normal_priority_packets_->front().capture_time_ms_); + oldest_packet_enqueue_time = std::min( + oldest_packet_enqueue_time, + normal_priority_packets_->front().enqueue_time_ms_); } if (!low_priority_packets_->empty()) { - oldest_packet_capture_time = std::min( - oldest_packet_capture_time, - low_priority_packets_->front().capture_time_ms_); + oldest_packet_enqueue_time = std::min( + oldest_packet_enqueue_time, + low_priority_packets_->front().enqueue_time_ms_); } - return now_ms - oldest_packet_capture_time; + return now_ms - oldest_packet_enqueue_time; } int32_t PacedSender::TimeUntilNextProcess() { @@ -250,34 +265,10 @@ int32_t PacedSender::Process() { uint32_t delta_time_ms = std::min(kMaxIntervalTimeMs, elapsed_time_ms); UpdateBytesPerInterval(delta_time_ms); } - uint32_t ssrc; - uint16_t sequence_number; - int64_t capture_time_ms; paced_sender::PacketList* packet_list; while (ShouldSendNextPacket(&packet_list)) { - GetNextPacketFromList(packet_list, &ssrc, &sequence_number, - &capture_time_ms); - critsect_->Leave(); - - const bool success = callback_->TimeToSendPacket(ssrc, sequence_number, - capture_time_ms); - critsect_->Enter(); - // If packet cannot be sent then keep it in packet list and exit early. - // There's no need to send more packets. 
- if (!success) { + if (!SendPacketFromList(packet_list)) return 0; - } - packet_list->pop_front(); - const bool last_packet = packet_list->empty() || - packet_list->front().capture_time_ms_ > capture_time_ms; - if (packet_list != high_priority_packets_.get()) { - if (capture_time_ms > capture_time_ms_last_sent_) { - capture_time_ms_last_sent_ = capture_time_ms; - } else if (capture_time_ms == capture_time_ms_last_sent_ && - last_packet) { - TRACE_EVENT_ASYNC_END0("webrtc_rtp", "PacedSend", capture_time_ms); - } - } } if (high_priority_packets_->empty() && normal_priority_packets_->empty() && @@ -298,6 +289,37 @@ int32_t PacedSender::Process() { return 0; } +// MUST have critsect_ when calling. +bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list) + EXCLUSIVE_LOCKS_REQUIRED(critsect_.get()) { + paced_sender::Packet packet = GetNextPacketFromList(packet_list); + critsect_->Leave(); + + const bool success = callback_->TimeToSendPacket(packet.ssrc_, + packet.sequence_number_, + packet.capture_time_ms_, + packet.retransmission_); + critsect_->Enter(); + // If packet cannot be sent then keep it in packet list and exit early. + // There's no need to send more packets. + if (!success) { + return false; + } + packet_list->pop_front(); + const bool last_packet = packet_list->empty() || + packet_list->front().capture_time_ms_ > packet.capture_time_ms_; + if (packet_list != high_priority_packets_.get()) { + if (packet.capture_time_ms_ > capture_time_ms_last_sent_) { + capture_time_ms_last_sent_ = packet.capture_time_ms_; + } else if (packet.capture_time_ms_ == capture_time_ms_last_sent_ && + last_packet) { + TRACE_EVENT_ASYNC_END0("webrtc_rtp", "PacedSend", + packet.capture_time_ms_); + } + } + return true; +} + // MUST have critsect_ when calling. void PacedSender::UpdateBytesPerInterval(uint32_t delta_time_ms) { media_budget_->IncreaseBudget(delta_time_ms); @@ -307,6 +329,7 @@ void PacedSender::UpdateBytesPerInterval(uint32_t delta_time_ms) { // MUST have critsect_ when calling. bool PacedSender::ShouldSendNextPacket(paced_sender::PacketList** packet_list) { + *packet_list = NULL; if (media_budget_->bytes_remaining() <= 0) { // All bytes consumed for this interval. // Check if we have not sent in a too long time. @@ -321,6 +344,22 @@ bool PacedSender::ShouldSendNextPacket(paced_sender::PacketList** packet_list) { return true; } } + // Send any old packets to avoid queuing for too long. 
+ if (max_queue_length_ms_ >= 0 && QueueInMs() > max_queue_length_ms_) { + int64_t high_priority_capture_time = -1; + if (!high_priority_packets_->empty()) { + high_priority_capture_time = + high_priority_packets_->front().capture_time_ms_; + *packet_list = high_priority_packets_.get(); + } + if (!normal_priority_packets_->empty() && + (high_priority_capture_time == -1 || high_priority_capture_time > + normal_priority_packets_->front().capture_time_ms_)) { + *packet_list = normal_priority_packets_.get(); + } + if (*packet_list) + return true; + } return false; } if (!high_priority_packets_->empty()) { @@ -338,13 +377,11 @@ bool PacedSender::ShouldSendNextPacket(paced_sender::PacketList** packet_list) { return false; } -void PacedSender::GetNextPacketFromList(paced_sender::PacketList* packets, - uint32_t* ssrc, uint16_t* sequence_number, int64_t* capture_time_ms) { +paced_sender::Packet PacedSender::GetNextPacketFromList( + paced_sender::PacketList* packets) { paced_sender::Packet packet = packets->front(); UpdateMediaBytesSent(packet.bytes_); - *sequence_number = packet.sequence_number_; - *ssrc = packet.ssrc_; - *capture_time_ms = packet.capture_time_ms_; + return packet; } // MUST have critsect_ when calling. diff --git a/media/webrtc/trunk/webrtc/modules/pacing/paced_sender_unittest.cc b/media/webrtc/trunk/webrtc/modules/pacing/paced_sender_unittest.cc index 286c09747b7d..f8dcdfc69854 100644 --- a/media/webrtc/trunk/webrtc/modules/pacing/paced_sender_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/pacing/paced_sender_unittest.cc @@ -24,8 +24,9 @@ static const float kPaceMultiplier = 1.5f; class MockPacedSenderCallback : public PacedSender::Callback { public: - MOCK_METHOD3(TimeToSendPacket, - bool(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms)); + MOCK_METHOD4(TimeToSendPacket, + bool(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms, + bool retransmission)); MOCK_METHOD1(TimeToSendPadding, int(int bytes)); }; @@ -35,7 +36,7 @@ class PacedSenderPadding : public PacedSender::Callback { PacedSenderPadding() : padding_sent_(0) {} bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number, - int64_t capture_time_ms) { + int64_t capture_time_ms, bool retransmission) { return true; } @@ -65,11 +66,12 @@ class PacedSenderTest : public ::testing::Test { void SendAndExpectPacket(PacedSender::Priority priority, uint32_t ssrc, uint16_t sequence_number, - int64_t capture_time_ms, int size) { + int64_t capture_time_ms, int size, + bool retransmission) { EXPECT_FALSE(send_bucket_->SendPacket(priority, ssrc, - sequence_number, capture_time_ms, size)); + sequence_number, capture_time_ms, size, retransmission)); EXPECT_CALL(callback_, TimeToSendPacket( - ssrc, sequence_number, capture_time_ms)) + ssrc, sequence_number, capture_time_ms, false)) .Times(1) .WillRepeatedly(Return(true)); } @@ -81,53 +83,50 @@ class PacedSenderTest : public ::testing::Test { TEST_F(PacedSenderTest, QueuePacket) { uint32_t ssrc = 12345; uint16_t sequence_number = 1234; - int64_t capture_time_ms = 56789; // Due to the multiplicative factor we can send 3 packets not 2 packets. 
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); + int64_t queued_packet_timestamp = TickTime::MillisecondTimestamp(); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number, capture_time_ms, 250)); + sequence_number, queued_packet_timestamp, 250, false)); send_bucket_->Process(); EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess()); EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0); - EXPECT_CALL(callback_, - TimeToSendPacket(ssrc, sequence_number, capture_time_ms)).Times(0); TickTime::AdvanceFakeClock(4); EXPECT_EQ(1, send_bucket_->TimeUntilNextProcess()); TickTime::AdvanceFakeClock(1); EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess()); EXPECT_CALL(callback_, TimeToSendPacket( - ssrc, sequence_number++, capture_time_ms)) + ssrc, sequence_number++, queued_packet_timestamp, false)) .Times(1) .WillRepeatedly(Return(true)); send_bucket_->Process(); sequence_number++; SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number++, capture_time_ms, 250)); + sequence_number++, TickTime::MillisecondTimestamp(), 250, false)); send_bucket_->Process(); } TEST_F(PacedSenderTest, PaceQueuedPackets) { uint32_t ssrc = 12345; uint16_t sequence_number = 1234; - int64_t capture_time_ms = 56789; // Due to the multiplicative factor we can send 3 packets not 2 packets. 
for (int i = 0; i < 3; ++i) { SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); } for (int j = 0; j < 30; ++j) { EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number++, capture_time_ms, 250)); + sequence_number++, TickTime::MillisecondTimestamp(), 250, false)); } send_bucket_->Process(); EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0); @@ -135,7 +134,7 @@ TEST_F(PacedSenderTest, PaceQueuedPackets) { EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess()); TickTime::AdvanceFakeClock(5); EXPECT_CALL(callback_, - TimeToSendPacket(ssrc, _, capture_time_ms)) + TimeToSendPacket(ssrc, _, _, false)) .Times(3) .WillRepeatedly(Return(true)); EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess()); @@ -146,35 +145,34 @@ TEST_F(PacedSenderTest, PaceQueuedPackets) { EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess()); EXPECT_EQ(0, send_bucket_->Process()); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number, capture_time_ms, 250)); + sequence_number, TickTime::MillisecondTimestamp(), 250, false)); send_bucket_->Process(); } TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) { uint32_t ssrc = 12345; uint16_t sequence_number = 1234; - int64_t capture_time_ms = 56789; uint16_t queued_sequence_number; // Due to the multiplicative factor we can send 3 packets not 2 packets. for (int i = 0; i < 3; ++i) { SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); } queued_sequence_number = sequence_number; for (int j = 0; j < 30; ++j) { // Send in duplicate packets. 
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number, capture_time_ms, 250)); + sequence_number, TickTime::MillisecondTimestamp(), 250, false)); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number++, capture_time_ms, 250)); + sequence_number++, TickTime::MillisecondTimestamp(), 250, false)); } EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0); send_bucket_->Process(); @@ -184,7 +182,8 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) { for (int i = 0; i < 3; ++i) { EXPECT_CALL(callback_, TimeToSendPacket(ssrc, queued_sequence_number++, - capture_time_ms)) + _, + false)) .Times(1) .WillRepeatedly(Return(true)); } @@ -196,29 +195,28 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) { EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess()); EXPECT_EQ(0, send_bucket_->Process()); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, - sequence_number++, capture_time_ms, 250)); + sequence_number++, TickTime::MillisecondTimestamp(), 250, false)); send_bucket_->Process(); } TEST_F(PacedSenderTest, Padding) { uint32_t ssrc = 12345; uint16_t sequence_number = 1234; - int64_t capture_time_ms = 56789; send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate); // Due to the multiplicative factor we can send 3 packets not 2 packets. SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + TickTime::MillisecondTimestamp(), 250, false); // No padding is expected since we have sent too much already. EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0); EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess()); @@ -261,7 +259,7 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) { int64_t start_time = TickTime::MillisecondTimestamp(); while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) { SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); TickTime::AdvanceFakeClock(kTimeStep); EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(1). WillOnce(Return(250)); @@ -282,7 +280,7 @@ TEST_F(PacedSenderTest, VerifyMaxPaddingBitrate) { int64_t start_time = TickTime::MillisecondTimestamp(); while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) { SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); TickTime::AdvanceFakeClock(kTimeStep); EXPECT_CALL(callback_, TimeToSendPadding(500)).Times(1). WillOnce(Return(250)); @@ -307,7 +305,7 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) { int media_payload = rand() % 100 + 200; // [200, 300] bytes. 
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, capture_time_ms, - media_payload)); + media_payload, false)); media_bytes += media_payload; TickTime::AdvanceFakeClock(kTimeStep); send_bucket_->Process(); @@ -325,26 +323,27 @@ TEST_F(PacedSenderTest, Priority) { // Due to the multiplicative factor we can send 3 packets not 2 packets. SendAndExpectPacket(PacedSender::kLowPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); send_bucket_->Process(); // Expect normal and low priority to be queued and high to pass through. EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kLowPriority, - ssrc_low_priority, sequence_number++, capture_time_ms_low_priority, 250)); + ssrc_low_priority, sequence_number++, capture_time_ms_low_priority, 250, + false)); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, - ssrc, sequence_number++, capture_time_ms, 250)); + ssrc, sequence_number++, capture_time_ms, 250, false)); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, - ssrc, sequence_number++, capture_time_ms, 250)); + ssrc, sequence_number++, capture_time_ms, 250, false)); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kHighPriority, - ssrc, sequence_number++, capture_time_ms, 250)); + ssrc, sequence_number++, capture_time_ms, 250, false)); // Expect all high and normal priority to be sent out first. EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0); - EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms)) + EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms, false)) .Times(3) .WillRepeatedly(Return(true)); @@ -354,7 +353,7 @@ TEST_F(PacedSenderTest, Priority) { EXPECT_EQ(0, send_bucket_->Process()); EXPECT_CALL(callback_, TimeToSendPacket( - ssrc_low_priority, _, capture_time_ms_low_priority)) + ssrc_low_priority, _, capture_time_ms_low_priority, false)) .Times(1) .WillRepeatedly(Return(true)); @@ -369,38 +368,41 @@ TEST_F(PacedSenderTest, Pause) { uint32_t ssrc = 12346; uint16_t sequence_number = 1234; int64_t capture_time_ms = TickTime::MillisecondTimestamp(); - TickTime::AdvanceFakeClock(10000); - int64_t second_capture_time_ms = TickTime::MillisecondTimestamp(); EXPECT_EQ(0, send_bucket_->QueueInMs()); // Due to the multiplicative factor we can send 3 packets not 2 packets. 
SendAndExpectPacket(PacedSender::kLowPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++, - capture_time_ms, 250); + capture_time_ms, 250, false); send_bucket_->Process(); send_bucket_->Pause(); + EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, + ssrc, sequence_number++, capture_time_ms, 250, false)); + EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, + ssrc, sequence_number++, capture_time_ms, 250, false)); + EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kHighPriority, + ssrc, sequence_number++, capture_time_ms, 250, false)); + + TickTime::AdvanceFakeClock(10000); + int64_t second_capture_time_ms = TickTime::MillisecondTimestamp(); + // Expect everything to be queued. EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kLowPriority, - ssrc_low_priority, sequence_number++, second_capture_time_ms, 250)); - EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, - ssrc, sequence_number++, capture_time_ms, 250)); - EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, - ssrc, sequence_number++, capture_time_ms, 250)); - EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kHighPriority, - ssrc, sequence_number++, capture_time_ms, 250)); + ssrc_low_priority, sequence_number++, second_capture_time_ms, 250, + false)); EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms, send_bucket_->QueueInMs()); // Expect no packet to come out while paused. EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0); - EXPECT_CALL(callback_, TimeToSendPacket(_, _, _)).Times(0); + EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _)).Times(0); for (int i = 0; i < 10; ++i) { TickTime::AdvanceFakeClock(5); @@ -409,7 +411,7 @@ TEST_F(PacedSenderTest, Pause) { } // Expect high prio packets to come out first followed by all packets in the // way they were added. - EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms)) + EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms, false)) .Times(3) .WillRepeatedly(Return(true)); send_bucket_->Resume(); @@ -419,7 +421,7 @@ TEST_F(PacedSenderTest, Pause) { EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess()); EXPECT_EQ(0, send_bucket_->Process()); - EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms)) + EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms, false)) .Times(1) .WillRepeatedly(Return(true)); EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess()); @@ -439,18 +441,21 @@ TEST_F(PacedSenderTest, ResendPacket) { ssrc, sequence_number, capture_time_ms, - 250)); + 250, + false)); + TickTime::AdvanceFakeClock(1); EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc, sequence_number + 1, capture_time_ms + 1, - 250)); - TickTime::AdvanceFakeClock(10000); + 250, + false)); + TickTime::AdvanceFakeClock(9999); EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms, send_bucket_->QueueInMs()); // Fails to send first packet so only one call. EXPECT_CALL(callback_, TimeToSendPacket( - ssrc, sequence_number, capture_time_ms)) + ssrc, sequence_number, capture_time_ms, false)) .Times(1) .WillOnce(Return(false)); TickTime::AdvanceFakeClock(10000); @@ -462,11 +467,11 @@ TEST_F(PacedSenderTest, ResendPacket) { // Fails to send second packet. 
EXPECT_CALL(callback_, TimeToSendPacket( - ssrc, sequence_number, capture_time_ms)) + ssrc, sequence_number, capture_time_ms, false)) .Times(1) .WillOnce(Return(true)); EXPECT_CALL(callback_, TimeToSendPacket( - ssrc, sequence_number + 1, capture_time_ms + 1)) + ssrc, sequence_number + 1, capture_time_ms + 1, false)) .Times(1) .WillOnce(Return(false)); TickTime::AdvanceFakeClock(10000); @@ -478,7 +483,7 @@ TEST_F(PacedSenderTest, ResendPacket) { // Send second packet and queue becomes empty. EXPECT_CALL(callback_, TimeToSendPacket( - ssrc, sequence_number + 1, capture_time_ms + 1)) + ssrc, sequence_number + 1, capture_time_ms + 1, false)) .Times(1) .WillOnce(Return(true)); TickTime::AdvanceFakeClock(10000); @@ -486,5 +491,52 @@ TEST_F(PacedSenderTest, ResendPacket) { EXPECT_EQ(0, send_bucket_->QueueInMs()); } +TEST_F(PacedSenderTest, MaxQueueLength) { + uint32_t ssrc = 12346; + uint16_t sequence_number = 1234; + EXPECT_EQ(0, send_bucket_->QueueInMs()); + + send_bucket_->UpdateBitrate(30, 0, 0); + for (int i = 0; i < 30; ++i) { + SendAndExpectPacket(PacedSender::kNormalPriority, + ssrc, + sequence_number++, + TickTime::MillisecondTimestamp(), + 1200, + false); + } + + TickTime::AdvanceFakeClock(2001); + SendAndExpectPacket(PacedSender::kNormalPriority, + ssrc, + sequence_number++, + TickTime::MillisecondTimestamp(), + 1200, + false); + EXPECT_EQ(2001, send_bucket_->QueueInMs()); + send_bucket_->Process(); + EXPECT_EQ(0, send_bucket_->QueueInMs()); + TickTime::AdvanceFakeClock(31); + send_bucket_->Process(); +} + +TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) { + uint32_t ssrc = 12346; + uint16_t sequence_number = 1234; + EXPECT_EQ(0, send_bucket_->QueueInMs()); + + send_bucket_->UpdateBitrate(30, 0, 0); + SendAndExpectPacket(PacedSender::kNormalPriority, + ssrc, + sequence_number, + TickTime::MillisecondTimestamp(), + 1200, + false); + + TickTime::AdvanceFakeClock(500); + EXPECT_EQ(500, send_bucket_->QueueInMs()); + send_bucket_->Process(); + EXPECT_EQ(0, send_bucket_->QueueInMs()); +} } // namespace test } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/pacing/pacing.gypi b/media/webrtc/trunk/webrtc/modules/pacing/pacing.gypi index a21f394ba0c9..07b433808fed 100644 --- a/media/webrtc/trunk/webrtc/modules/pacing/pacing.gypi +++ b/media/webrtc/trunk/webrtc/modules/pacing/pacing.gypi @@ -14,9 +14,6 @@ 'dependencies': [ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - 'include', - ], 'sources': [ 'include/paced_sender.h', 'paced_sender.cc', diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator.cc deleted file mode 100644 index 316297f03bab..000000000000 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator.cc +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "webrtc/modules/remote_bitrate_estimator/bitrate_estimator.h" - -namespace webrtc { - -const float kBitrateAverageWindowMs = 500.0f; - -BitRateStats::BitRateStats() - : data_samples_(), - accumulated_bytes_(0) { -} - -BitRateStats::~BitRateStats() { - Init(); -} - -void BitRateStats::Init() { - accumulated_bytes_ = 0; - while (data_samples_.size() > 0) { - delete data_samples_.front(); - data_samples_.pop_front(); - } -} - -void BitRateStats::Update(uint32_t packet_size_bytes, int64_t now_ms) { - // Find an empty slot for storing the new sample and at the same time - // accumulate the history. - data_samples_.push_back(new DataTimeSizeTuple(packet_size_bytes, now_ms)); - accumulated_bytes_ += packet_size_bytes; - EraseOld(now_ms); -} - -void BitRateStats::EraseOld(int64_t now_ms) { - while (data_samples_.size() > 0) { - if (now_ms - data_samples_.front()->time_complete_ms > - kBitrateAverageWindowMs) { - // Delete old sample - accumulated_bytes_ -= data_samples_.front()->size_bytes; - delete data_samples_.front(); - data_samples_.pop_front(); - } else { - break; - } - } -} - -uint32_t BitRateStats::BitRate(int64_t now_ms) { - // Calculate the average bit rate the past BITRATE_AVERAGE_WINDOW ms. - // Removes any old samples from the list. - EraseOld(now_ms); - return static_cast<uint32_t>(accumulated_bytes_ * 8.0f * 1000.0f / - kBitrateAverageWindowMs + 0.5f); -} -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator.h deleted file mode 100644 index 3d5d51b0e992..000000000000 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator.h +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_BITRATE_ESTIMATOR_H_ -#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_BITRATE_ESTIMATOR_H_ - -#include <list> - -#include "webrtc/typedefs.h" - -namespace webrtc { - -class BitRateStats { - public: - BitRateStats(); - ~BitRateStats(); - - void Init(); - void Update(uint32_t packet_size_bytes, int64_t now_ms); - uint32_t BitRate(int64_t now_ms); - - private: - struct DataTimeSizeTuple { - DataTimeSizeTuple(uint32_t size_bytes_in, int64_t time_complete_ms_in) - : size_bytes(size_bytes_in), - time_complete_ms(time_complete_ms_in) { - } - - uint32_t size_bytes; - int64_t time_complete_ms; - }; - - void EraseOld(int64_t now_ms); - - std::list<DataTimeSizeTuple*> data_samples_; - uint32_t accumulated_bytes_; -}; -} // namespace webrtc - -#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_BITRATE_ESTIMATOR_H_ diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator_unittest.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator_unittest.cc deleted file mode 100644 index ca9da99c010c..000000000000 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bitrate_estimator_unittest.cc +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "testing/gtest/include/gtest/gtest.h" -#include "webrtc/modules/remote_bitrate_estimator/bitrate_estimator.h" - -namespace { - -using webrtc::BitRateStats; - -class BitRateStatsTest : public ::testing::Test { - protected: - BitRateStatsTest() {}; - BitRateStats stats_; -}; - -TEST_F(BitRateStatsTest, TestStrictMode) { - int64_t now_ms = 0; - // Should be initialized to 0. - EXPECT_EQ(0u, stats_.BitRate(now_ms)); - stats_.Update(1500, now_ms); - // Expecting 24 kbps given a 500 ms window with one 1500 bytes packet. - EXPECT_EQ(24000u, stats_.BitRate(now_ms)); - stats_.Init(); - // Expecting 0 after init. - EXPECT_EQ(0u, stats_.BitRate(now_ms)); - for (int i = 0; i < 100000; ++i) { - if (now_ms % 10 == 0) { - stats_.Update(1500, now_ms); - } - // Approximately 1200 kbps expected. Not exact since when packets - // are removed we will jump 10 ms to the next packet. - if (now_ms > 0 && now_ms % 500 == 0) { - EXPECT_NEAR(1200000u, stats_.BitRate(now_ms), 24000u); - } - now_ms += 1; - } - now_ms += 500; - // The window is 2 seconds. If nothing has been received for that time - // the estimate should be 0. - EXPECT_EQ(0u, stats_.BitRate(now_ms)); -} -} // namespace diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc new file mode 100644 index 000000000000..976f2a87d8c0 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h" +#include "webrtc/test/testsupport/fileutils.h" + +namespace webrtc { +namespace testing { +namespace bwe { +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +std::vector<BweTestConfig::EstimatorConfig> SingleEstimatorConfig() { + static const RemoteBitrateEstimatorFactory factory = + AbsoluteSendTimeRemoteBitrateEstimatorFactory(); + + std::vector<BweTestConfig::EstimatorConfig> result; + result.push_back(BweTestConfig::EstimatorConfig("AST", &factory)); + return result; +} + +std::vector<const PacketSenderFactory*> AdaptiveVideoSenderFactories( + uint32_t count) { + static const AdaptiveVideoPacketSenderFactory factories[] = { + AdaptiveVideoPacketSenderFactory(30.00f, 150, 0x1234, 0.13f), + AdaptiveVideoPacketSenderFactory(30.00f, 300, 0x3456, 0.26f), + AdaptiveVideoPacketSenderFactory(15.00f, 600, 0x4567, 0.39f), + }; + + assert(count <= sizeof(factories) / sizeof(factories[0])); + + std::vector<const PacketSenderFactory*> result; + for (uint32_t i = 0; i < count; ++i) { + result.push_back(&factories[i]); + } + return result; +} + +BweTestConfig MakeAdaptiveBweTestConfig(uint32_t sender_count) { + BweTestConfig result = { + AdaptiveVideoSenderFactories(sender_count), SingleEstimatorConfig() + }; + return result; +} + +// This test fixture is used to instantiate tests running with adaptive video +// senders. +class BweSimulation : public BweTest { + public: + BweSimulation() : BweTest() {} + virtual ~BweSimulation() {} + + private: + DISALLOW_COPY_AND_ASSIGN(BweSimulation); +}; + +INSTANTIATE_TEST_CASE_P(VideoSendersTest, BweSimulation, + ::testing::Values(MakeAdaptiveBweTestConfig(1), + MakeAdaptiveBweTestConfig(3))); + +TEST_P(BweSimulation, SprintUplinkTest) { + VerboseLogging(true); + RateCounterFilter counter1(this, "sender_output"); + TraceBasedDeliveryFilter filter(this, "link_capacity"); + RateCounterFilter counter2(this, "receiver_input"); + ASSERT_TRUE(filter.Init(test::ResourcePath("sprint-uplink", "rx"))); + RunFor(60 * 1000); +} + +TEST_P(BweSimulation, Verizon4gDownlinkTest) { + VerboseLogging(true); + RateCounterFilter counter1(this, "sender_output"); + TraceBasedDeliveryFilter filter(this, "link_capacity"); + RateCounterFilter counter2(this, "receiver_input"); + ASSERT_TRUE(filter.Init(test::ResourcePath("verizon4g-downlink", "rx"))); + RunFor(22 * 60 * 1000); +} +#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +} // namespace bwe +} // namespace testing +} // namespace webrtc
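The two cases above establish the fixture's pattern: filters declared in order form the simulated network path between sender and receiver, and RunFor() advances simulated time. For illustration, a sketch of what one more trace-driven case might look like if added inside the same #if block — the att-downlink trace name comes from the resource list registered earlier in this patch, but the test itself is hypothetical, not part of the change:

    // Hypothetical extra simulation in the same style as the tests above.
    TEST_P(BweSimulation, AttDownlinkTest) {
      VerboseLogging(true);
      RateCounterFilter counter1(this, "sender_output");       // rate before the link
      TraceBasedDeliveryFilter filter(this, "link_capacity");  // replays the trace
      RateCounterFilter counter2(this, "receiver_input");      // rate after the link
      ASSERT_TRUE(filter.Init(test::ResourcePath("att-downlink", "rx")));
      RunFor(60 * 1000);  // one simulated minute
    }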
diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h index a2f6bcfd6055..a68879acdedc 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h @@ -37,6 +37,27 @@ class RemoteBitrateObserver { virtual ~RemoteBitrateObserver() {} }; +struct ReceiveBandwidthEstimatorStats { + ReceiveBandwidthEstimatorStats() : total_propagation_time_delta_ms(0) {} + + // The "propagation_time_delta" of a frame is defined as (d_arrival - d_sent), + // where d_arrival is the delta of the arrival times of the frame and the + // previous frame, d_sent is the delta of the sent times of the frame and + // the previous frame. The sent time is calculated from the RTP timestamp. + + // |total_propagation_time_delta_ms| is the sum of the propagation_time_deltas + // of all received frames, except that it is adjusted to 0 when it becomes + // negative. + int total_propagation_time_delta_ms; + // The propagation_time_deltas for the frames arrived in the last + // kProcessIntervalMs using the clock passed to + // RemoteBitrateEstimatorFactory::Create. + std::vector<int> recent_propagation_time_delta_ms; + // The arrival times for the frames arrived in the last kProcessIntervalMs + // using the clock passed to RemoteBitrateEstimatorFactory::Create. + std::vector<int64_t> recent_arrival_time_ms; +}; + class RemoteBitrateEstimator : public CallStatsObserver, public Module { public: virtual ~RemoteBitrateEstimator() {} @@ -58,6 +79,9 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module { virtual bool LatestEstimate(std::vector<unsigned int>* ssrcs, unsigned int* bitrate_bps) const = 0; + // Returns true if the statistics are available. + virtual bool GetStats(ReceiveBandwidthEstimatorStats* output) const = 0; + protected: static const int kProcessIntervalMs = 1000; static const int kStreamTimeOutMs = 2000; @@ -69,16 +93,19 @@ struct RemoteBitrateEstimatorFactory { virtual RemoteBitrateEstimator* Create( RemoteBitrateObserver* observer, - Clock* clock) const; + Clock* clock, + uint32_t min_bitrate_bps) const; }; -struct AbsoluteSendTimeRemoteBitrateEstimatorFactory { +struct AbsoluteSendTimeRemoteBitrateEstimatorFactory + : public RemoteBitrateEstimatorFactory { AbsoluteSendTimeRemoteBitrateEstimatorFactory() {} virtual ~AbsoluteSendTimeRemoteBitrateEstimatorFactory() {} virtual RemoteBitrateEstimator* Create( RemoteBitrateObserver* observer, - Clock* clock) const; + Clock* clock, + uint32_t min_bitrate_bps) const; }; } // namespace webrtc
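To make the propagation_time_delta definition above concrete with invented numbers: if frame N-1 arrived at 100 ms with an RTP-derived send time of 90 ms, and frame N arrives at 135 ms with send time 120 ms, then d_arrival = 35, d_sent = 30, and the delta is +5 ms — the network spaced the frames out more than the sender did, suggesting queueing delay is building. A small helper showing just that arithmetic (the function name is invented; the formula is the one documented in the struct above):

    #include <cstdint>

    // Per-frame statistic as documented above: positive means inter-arrival
    // spacing grew relative to the sender's spacing, negative means it shrank.
    int PropagationTimeDeltaMs(int64_t arrival_ms, int64_t prev_arrival_ms,
                               int64_t sent_ms, int64_t prev_sent_ms) {
      const int64_t d_arrival = arrival_ms - prev_arrival_ms;
      const int64_t d_sent = sent_ms - prev_sent_ms;
      return static_cast<int>(d_arrival - d_sent);
    }

    // Example from the text: PropagationTimeDeltaMs(135, 100, 120, 90) == 5.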
diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc new file mode 100644 index 000000000000..4a9b4488108e --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h" + +namespace webrtc { + +RateStatistics::RateStatistics(uint32_t window_size_ms, float scale) + : num_buckets_(window_size_ms + 1), // N ms in (N+1) buckets. + buckets_(new uint32_t[num_buckets_]()), + accumulated_count_(0), + oldest_time_(0), + oldest_index_(0), + scale_(scale / (num_buckets_ - 1)) { +} + +RateStatistics::~RateStatistics() { +} + +void RateStatistics::Reset() { + accumulated_count_ = 0; + oldest_time_ = 0; + oldest_index_ = 0; + for (int i = 0; i < num_buckets_; i++) { + buckets_[i] = 0; + } +} + +void RateStatistics::Update(uint32_t count, int64_t now_ms) { + if (now_ms < oldest_time_) { + // Too old data is ignored. + return; + } + + EraseOld(now_ms); + + int now_offset = static_cast<int>(now_ms - oldest_time_); + assert(now_offset < num_buckets_); + int index = oldest_index_ + now_offset; + if (index >= num_buckets_) { + index -= num_buckets_; + } + buckets_[index] += count; + accumulated_count_ += count; +} + +uint32_t RateStatistics::Rate(int64_t now_ms) { + EraseOld(now_ms); + return static_cast<uint32_t>(accumulated_count_ * scale_ + 0.5f); +} + +void RateStatistics::EraseOld(int64_t now_ms) { + int64_t new_oldest_time = now_ms - num_buckets_ + 1; + if (new_oldest_time <= oldest_time_) { + return; + } + + while (oldest_time_ < new_oldest_time) { + uint32_t count_in_oldest_bucket = buckets_[oldest_index_]; + assert(accumulated_count_ >= count_in_oldest_bucket); + accumulated_count_ -= count_in_oldest_bucket; + buckets_[oldest_index_] = 0; + if (++oldest_index_ >= num_buckets_) { + oldest_index_ = 0; + } + ++oldest_time_; + if (accumulated_count_ == 0) { + // This guarantees we go through all the buckets at most once, even if + // |new_oldest_time| is far greater than |oldest_time_|. + break; + } + } + oldest_time_ = new_oldest_time; +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics.h new file mode 100644 index 000000000000..429669059a2d --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_RATE_STATISTICS_H_ +#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_RATE_STATISTICS_H_ + +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/typedefs.h" + +namespace webrtc { + +class RateStatistics { + public: + // window_size = window size in ms for the rate estimation + // scale = coefficient to convert counts/ms to desired units, + // ex: if counts represent bytes, use 8*1000 to go to bits/s + RateStatistics(uint32_t window_size_ms, float scale); + ~RateStatistics(); + + void Reset(); + void Update(uint32_t count, int64_t now_ms); + uint32_t Rate(int64_t now_ms); + + private: + void EraseOld(int64_t now_ms); + + // Counters are kept in buckets (circular buffer), with one bucket + // per millisecond. + const int num_buckets_; + scoped_array<uint32_t> buckets_; + + // Total count recorded in buckets. + uint32_t accumulated_count_; + + // Oldest time recorded in buckets. + int64_t oldest_time_; + + // Bucket index of oldest counter recorded in buckets. + int oldest_index_; + + // To convert counts/ms to desired units + const float scale_; +}; +} // namespace webrtc + +#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_RATE_STATISTICS_H_
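A short usage sketch of the RateStatistics class added above. The (500, 8000) constructor arguments mirror how the single-stream estimator later in this patch instantiates it: a 500 ms window, with per-millisecond byte counts scaled into bits per second. The surrounding functions are invented scaffolding, not part of the patch:

    #include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"

    // 500 ms sliding window; a scale of 8000 converts byte counts taken over
    // 500 ms into bits per second (bytes * 8 * 1000 / 500).
    static webrtc::RateStatistics incoming_bitrate(500, 8000);

    void OnPacketReceived(uint32_t payload_bytes, int64_t now_ms) {
      incoming_bitrate.Update(payload_bytes, now_ms);
    }

    uint32_t IncomingBitrateBps(int64_t now_ms) {
      // Rate() also prunes buckets that have fallen out of the window.
      return incoming_bitrate.Rate(now_ms);
    }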
diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics_unittest.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics_unittest.cc new file mode 100644 index 000000000000..0cbab30bc139 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rate_statistics_unittest.cc @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h" + +namespace { + +using webrtc::RateStatistics; + +class RateStatisticsTest : public ::testing::Test { + protected: + RateStatisticsTest() : stats_(500, 8000) {} + RateStatistics stats_; }; + +TEST_F(RateStatisticsTest, TestStrictMode) { + int64_t now_ms = 0; + // Should be initialized to 0. + EXPECT_EQ(0u, stats_.Rate(now_ms)); + stats_.Update(1500, now_ms); + // Expecting 24 kbps given a 500 ms window with one 1500 bytes packet. + EXPECT_EQ(24000u, stats_.Rate(now_ms)); + stats_.Reset(); + // Expecting 0 after reset. + EXPECT_EQ(0u, stats_.Rate(now_ms)); + for (int i = 0; i < 100000; ++i) { + if (now_ms % 10 == 0) { + stats_.Update(1500, now_ms); + } + // Approximately 1200 kbps expected. Not exact since when packets + // are removed we will jump 10 ms to the next packet. + if (now_ms > 0 && now_ms % 500 == 0) { + EXPECT_NEAR(1200000u, stats_.Rate(now_ms), 24000u); + } + now_ms += 1; + } + now_ms += 500; + // The window is 500 ms. If nothing has been received for that time + // the estimate should be 0. + EXPECT_EQ(0u, stats_.Rate(now_ms)); +} + +TEST_F(RateStatisticsTest, IncreasingThenDecreasingBitrate) { + int64_t now_ms = 0; + stats_.Reset(); + // Expecting 0 after reset. + uint32_t bitrate = stats_.Rate(now_ms); + EXPECT_EQ(0u, bitrate); + // 1000 bytes per millisecond until plateau is reached. + while (++now_ms < 10000) { + stats_.Update(1000, now_ms); + uint32_t new_bitrate = stats_.Rate(now_ms); + if (new_bitrate != bitrate) { + // New bitrate must be higher than previous one. + EXPECT_GT(new_bitrate, bitrate); + } else { + // Plateau reached, 8000 kbps expected. + EXPECT_NEAR(8000000u, bitrate, 80000u); + break; + } + bitrate = new_bitrate; + } + // 1000 bytes per millisecond until 10-second mark, 8000 kbps expected. + while (++now_ms < 10000) { + stats_.Update(1000, now_ms); + bitrate = stats_.Rate(now_ms); + EXPECT_NEAR(8000000u, bitrate, 80000u); + } + // Zero bytes per millisecond until 0 is reached. + while (++now_ms < 20000) { + stats_.Update(0, now_ms); + uint32_t new_bitrate = stats_.Rate(now_ms); + if (new_bitrate != bitrate) { + // New bitrate must be lower than previous one. + EXPECT_LT(new_bitrate, bitrate); + } else { + // 0 kbps expected. + EXPECT_EQ(0u, bitrate); + break; + } + bitrate = new_bitrate; + } + // Zero bytes per millisecond until 20-second mark, 0 kbps expected.
+ while (++now_ms < 20000) { + stats_.Update(0, now_ms); + EXPECT_EQ(0u, stats_.Rate(now_ms)); + } +} +} // namespace diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi index 9c8efdebed58..810da4622b85 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi @@ -18,18 +18,69 @@ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(rbe_components_path)/remote_bitrate_estimator_components.gyp:rbe_components', ], + 'sources': [ + 'include/bwe_defines.h', + 'include/remote_bitrate_estimator.h', + 'include/rtp_to_ntp.h', + 'rate_statistics.cc', + 'rate_statistics.h', + 'rtp_to_ntp.cc', + ], # source + }, + { + 'target_name': 'bwe_tools_util', + 'type': 'static_library', + 'dependencies': [ + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + 'rtp_rtcp', + ], + 'sources': [ + 'tools/bwe_rtp.cc', + 'tools/bwe_rtp.h', + ], + }, + { + 'target_name': 'bwe_rtp_to_text', + 'type': 'executable', + 'includes': [ + '../rtp_rtcp/source/rtp_rtcp.gypi', + ], + 'dependencies': [ + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + 'bwe_tools_util', + 'rtp_rtcp', + ], 'direct_dependent_settings': { 'include_dirs': [ 'include', ], }, 'sources': [ - 'include/bwe_defines.h', - 'include/remote_bitrate_estimator.h', - 'include/rtp_to_ntp.h', - 'bitrate_estimator.cc', - 'bitrate_estimator.h', - 'rtp_to_ntp.cc', + 'tools/rtp_to_text.cc', + '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.cc', + '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.h', + ], # source + }, + { + 'target_name': 'bwe_rtp_play', + 'type': 'executable', + 'includes': [ + '../rtp_rtcp/source/rtp_rtcp.gypi', + ], + 'dependencies': [ + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + 'bwe_tools_util', + 'rtp_rtcp', + ], + 'direct_dependent_settings': { + 'include_dirs': [ + 'include', + ], + }, + 'sources': [ + 'tools/bwe_rtp_play.cc', + '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.cc', + '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.h', ], # source }, ], # targets diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components.gyp b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components.gyp index c5eca0535761..9f24f22b4427 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components.gyp +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components.gyp @@ -14,9 +14,6 @@ { 'target_name': 'rbe_components', 'type': 'static_library', - 'include_dirs': [ - '<(webrtc_root)/modules/remote_bitrate_estimator', - ], 'sources': [ 'overuse_detector.cc', 'overuse_detector.h', diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index 8eec3428368f..b0f906487c94 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ 
-10,13 +10,14 @@ #include <map> -#include "webrtc/modules/remote_bitrate_estimator/bitrate_estimator.h" +#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h" #include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "webrtc/modules/remote_bitrate_estimator/overuse_detector.h" #include "webrtc/modules/remote_bitrate_estimator/remote_rate_control.h" #include "webrtc/system_wrappers/interface/clock.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/typedefs.h" namespace webrtc { @@ -24,7 +25,8 @@ namespace { class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { public: RemoteBitrateEstimatorSingleStream(RemoteBitrateObserver* observer, - Clock* clock); + Clock* clock, + uint32_t min_bitrate_bps); virtual ~RemoteBitrateEstimatorSingleStream() {} // Called for each incoming packet. If this is a new SSRC, a new @@ -53,6 +55,9 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { virtual bool LatestEstimate(std::vector<unsigned int>* ssrcs, unsigned int* bitrate_bps) const OVERRIDE; + virtual bool GetStats( + ReceiveBandwidthEstimatorStats* output) const OVERRIDE; + private: typedef std::map<unsigned int, OveruseDetector> SsrcOveruseDetectorMap; @@ -63,7 +68,7 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { Clock* clock_; SsrcOveruseDetectorMap overuse_detectors_; - BitRateStats incoming_bitrate_; + RateStatistics incoming_bitrate_; RemoteRateControl remote_rate_; RemoteBitrateObserver* observer_; scoped_ptr<CriticalSectionWrapper> crit_sect_; @@ -72,8 +77,11 @@ RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream( RemoteBitrateObserver* observer, - Clock* clock) + Clock* clock, + uint32_t min_bitrate_bps) : clock_(clock), + incoming_bitrate_(500, 8000), + remote_rate_(min_bitrate_bps), observer_(observer), crit_sect_(CriticalSectionWrapper::CreateCriticalSection()), last_process_time_(-1) { @@ -106,7 +114,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( const BandwidthUsage prior_state = overuse_detector->State(); overuse_detector->Update(payload_size, -1, rtp_timestamp, arrival_time_ms); if (overuse_detector->State() == kBwOverusing) { - unsigned int incoming_bitrate = incoming_bitrate_.BitRate(arrival_time_ms); + unsigned int incoming_bitrate = incoming_bitrate_.Rate(arrival_time_ms); if (prior_state != kBwOverusing || remote_rate_.TimeToReduceFurther(arrival_time_ms, incoming_bitrate)) { // The first overuse should immediately trigger a new estimate. @@ -164,7 +172,7 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t time_now) { double mean_noise_var = sum_noise_var / static_cast<double>(overuse_detectors_.size()); const RateControlInput input(bw_state, - incoming_bitrate_.BitRate(time_now), + incoming_bitrate_.Rate(time_now), mean_noise_var); const RateControlRegion region = remote_rate_.Update(&input, time_now); unsigned int target_bitrate = remote_rate_.UpdateBandwidthEstimate(time_now); @@ -205,6 +213,12 @@ bool RemoteBitrateEstimatorSingleStream::LatestEstimate( return true; } +bool RemoteBitrateEstimatorSingleStream::GetStats( + ReceiveBandwidthEstimatorStats* output) const { + // Not implemented.
+ return false; +} + void RemoteBitrateEstimatorSingleStream::GetSsrcs( std::vector* ssrcs) const { assert(ssrcs); @@ -219,13 +233,21 @@ void RemoteBitrateEstimatorSingleStream::GetSsrcs( RemoteBitrateEstimator* RemoteBitrateEstimatorFactory::Create( RemoteBitrateObserver* observer, - Clock* clock) const { - return new RemoteBitrateEstimatorSingleStream(observer, clock); + Clock* clock, + uint32_t min_bitrate_bps) const { + WEBRTC_TRACE(kTraceStateInfo, kTraceRemoteBitrateEstimator, -1, + "RemoteBitrateEstimatorFactory: Instantiating."); + return new RemoteBitrateEstimatorSingleStream(observer, clock, + min_bitrate_bps); } RemoteBitrateEstimator* AbsoluteSendTimeRemoteBitrateEstimatorFactory::Create( RemoteBitrateObserver* observer, - Clock* clock) const { - return new RemoteBitrateEstimatorSingleStream(observer, clock); + Clock* clock, + uint32_t min_bitrate_bps) const { + WEBRTC_TRACE(kTraceStateInfo, kTraceRemoteBitrateEstimator, -1, + "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating."); + return new RemoteBitrateEstimatorSingleStream(observer, clock, + min_bitrate_bps); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc index 69cb38a7c806..a3e44d87ef52 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc @@ -17,11 +17,14 @@ namespace webrtc { class RemoteBitrateEstimatorSingleTest : public RemoteBitrateEstimatorTest { public: + static const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000; + RemoteBitrateEstimatorSingleTest() {} virtual void SetUp() { bitrate_estimator_.reset(RemoteBitrateEstimatorFactory().Create( bitrate_observer_.get(), - &clock_)); + &clock_, + kRemoteBitrateEstimatorMinBitrateBps)); } protected: DISALLOW_COPY_AND_ASSIGN(RemoteBitrateEstimatorSingleTest); diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc new file mode 100644 index 000000000000..3d86fbb1f86c --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc @@ -0,0 +1,214 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h" +#include "webrtc/test/testsupport/fileutils.h" + +namespace webrtc { +namespace testing { +namespace bwe { +std::vector VideoSenderFactories(uint32_t count) { + static const VideoPacketSenderFactory factories[] = { + VideoPacketSenderFactory(30.00f, 150, 0x1234, 0.13f), + VideoPacketSenderFactory(15.00f, 500, 0x2345, 0.16f), + VideoPacketSenderFactory(30.00f, 1200, 0x3456, 0.26f), + VideoPacketSenderFactory(7.49f, 150, 0x4567, 0.05f), + VideoPacketSenderFactory(7.50f, 150, 0x5678, 0.15f), + VideoPacketSenderFactory(7.51f, 150, 0x6789, 0.25f), + VideoPacketSenderFactory(15.02f, 150, 0x7890, 0.27f), + VideoPacketSenderFactory(15.03f, 150, 0x8901, 0.38f), + VideoPacketSenderFactory(30.02f, 150, 0x9012, 0.39f), + VideoPacketSenderFactory(30.03f, 150, 0x0123, 0.52f) + }; + + assert(count <= sizeof(factories) / sizeof(factories[0])); + + std::vector result; + for (uint32_t i = 0; i < count; ++i) { + result.push_back(&factories[i]); + } + + return result; +} + +std::vector EstimatorConfigs() { + static const RemoteBitrateEstimatorFactory factories[] = { + RemoteBitrateEstimatorFactory(), + AbsoluteSendTimeRemoteBitrateEstimatorFactory() + }; + + std::vector result; + result.push_back(BweTestConfig::EstimatorConfig("TOF", &factories[0])); + result.push_back(BweTestConfig::EstimatorConfig("AST", &factories[1])); + return result; +} + +BweTestConfig MakeBweTestConfig(uint32_t sender_count) { + BweTestConfig result = { + VideoSenderFactories(sender_count), EstimatorConfigs() + }; + return result; +} + +INSTANTIATE_TEST_CASE_P(VideoSendersTest, BweTest, + ::testing::Values(MakeBweTestConfig(1), + MakeBweTestConfig(3))); + +TEST_P(BweTest, UnlimitedSpeed) { + VerboseLogging(false); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, SteadyLoss) { + LossFilter loss(this); + loss.SetLoss(20.0); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingLoss1) { + LossFilter loss(this); + for (int i = 0; i < 76; ++i) { + loss.SetLoss(i); + RunFor(5000); + } +} + +TEST_P(BweTest, SteadyDelay) { + DelayFilter delay(this); + delay.SetDelay(1000); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingDelay1) { + DelayFilter delay(this); + RunFor(10 * 60 * 1000); + for (int i = 0; i < 30 * 2; ++i) { + delay.SetDelay(i); + RunFor(10 * 1000); + } + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingDelay2) { + DelayFilter delay(this); + RateCounterFilter counter(this); + RunFor(1 * 60 * 1000); + for (int i = 1; i < 51; ++i) { + delay.SetDelay(10.0f * i); + RunFor(10 * 1000); + } + delay.SetDelay(0.0f); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, JumpyDelay1) { + DelayFilter delay(this); + RunFor(10 * 60 * 1000); + for (int i = 1; i < 200; ++i) { + delay.SetDelay((10 * i) % 500); + RunFor(1000); + delay.SetDelay(1.0f); + RunFor(1000); + } + delay.SetDelay(0.0f); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, SteadyJitter) { + JitterFilter jitter(this); + RateCounterFilter counter(this); + jitter.SetJitter(20); + RunFor(2 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingJitter1) { + JitterFilter jitter(this); + for (int i = 0; i < 2 * 60 * 2; ++i) { + jitter.SetJitter(i); + RunFor(10 * 1000); + } + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingJitter2) { + JitterFilter jitter(this); + RunFor(30 * 1000); + for (int i = 1; i < 51; ++i) { + jitter.SetJitter(10.0f * i); + RunFor(10 * 1000); + } + jitter.SetJitter(0.0f); + RunFor(10 * 60 * 
1000); +} + +TEST_P(BweTest, SteadyReorder) { + ReorderFilter reorder(this); + reorder.SetReorder(20.0); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingReorder1) { + ReorderFilter reorder(this); + for (int i = 0; i < 76; ++i) { + reorder.SetReorder(i); + RunFor(5000); + } +} + +TEST_P(BweTest, SteadyChoke) { + ChokeFilter choke(this); + choke.SetCapacity(140); + RunFor(10 * 60 * 1000); +} + +TEST_P(BweTest, IncreasingChoke1) { + ChokeFilter choke(this); + for (int i = 1200; i >= 100; i -= 100) { + choke.SetCapacity(i); + RunFor(5000); + } +} + +TEST_P(BweTest, IncreasingChoke2) { + ChokeFilter choke(this); + RunFor(60 * 1000); + for (int i = 1200; i >= 100; i -= 20) { + choke.SetCapacity(i); + RunFor(1000); + } +} + +TEST_P(BweTest, Multi1) { + DelayFilter delay(this); + ChokeFilter choke(this); + RateCounterFilter counter(this); + choke.SetCapacity(1000); + RunFor(1 * 60 * 1000); + for (int i = 1; i < 51; ++i) { + delay.SetDelay(100.0f * i); + RunFor(10 * 1000); + } + RunFor(500 * 1000); + delay.SetDelay(0.0f); + RunFor(5 * 60 * 1000); +} + +TEST_P(BweTest, Multi2) { + ChokeFilter choke(this); + JitterFilter jitter(this); + RateCounterFilter counter(this); + choke.SetCapacity(2000); + jitter.SetJitter(120); + RunFor(5 * 60 * 1000); +} +} // namespace bwe +} // namespace testing +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc index 1f5fe42bce65..994abdbee899 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc @@ -22,8 +22,8 @@ namespace webrtc { const unsigned int kDefaultRttMs = 200; -RemoteRateControl::RemoteRateControl() - : min_configured_bit_rate_(30000), +RemoteRateControl::RemoteRateControl(uint32_t min_bitrate_bps) + : min_configured_bit_rate_(min_bitrate_bps), max_configured_bit_rate_(30000000), current_bit_rate_(max_configured_bit_rate_), max_hold_rate_(0), @@ -45,7 +45,7 @@ RemoteRateControl::RemoteRateControl() } void RemoteRateControl::Reset() { - *this = RemoteRateControl(); + *this = RemoteRateControl(min_configured_bit_rate_); came_from_state_ = kRcHold; } diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h index 87a57217a574..b525834eca1b 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h @@ -17,7 +17,7 @@ namespace webrtc { class RemoteRateControl { public: - RemoteRateControl(); + explicit RemoteRateControl(uint32_t min_bitrate_bps); ~RemoteRateControl() {} void Reset(); diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc index 5ed38c9fc598..109edae7cc5c 100644 --- a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc @@ -18,8 +18,6 @@ namespace webrtc { namespace synchronization { -const double kNtpFracPerMs = 4.294967296E6; - RtcpMeasurement::RtcpMeasurement() : ntp_secs(0), ntp_frac(0), rtp_timestamp(0) {} diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_plot.sh 
b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_plot.sh
new file mode 100755
index 000000000000..4695af45cd74
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_plot.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# To set up in e.g. Eclipse, run a separate shell and pipe the output from the
+# test into this script.
+#
+# In Eclipse, that amounts to creating a Run Configuration which starts
+# "/bin/bash" with the arguments "-c [trunk_path]/out/Debug/modules_unittests
+# --gtest_filter=*BweTest* | [trunk_path]/webrtc/modules/
+# remote_bitrate_estimator/bwe_plot.sh".
+
+# bwe_plot.sh has a single y axis mode and a dual y axis mode. If any line
+# specifies an axis by ending with "#", two y axes will be used; the first is
+# assumed to represent bitrate (in kbps) and the second time deltas (in ms).
+
+log=$(
diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
+
+#include <algorithm>
+
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h"
+
+namespace webrtc {
+namespace testing {
+namespace bwe {
+
+using std::string;
+using std::vector;
+
+namespace stl_helpers {
+template<typename T> void DeleteElements(T* container) {
+  if (!container) return;
+  for (typename T::iterator it = container->begin(); it != container->end();
+      ++it) {
+    delete *it;
+  }
+  container->clear();
+}
+}  // namespace stl_helpers
+
+class BweTest::TestedEstimator : public RemoteBitrateObserver {
+ public:
+  static const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000;
+
+  TestedEstimator(const string& test_name,
+                  const BweTestConfig::EstimatorConfig& config)
+      : debug_name_(config.debug_name),
+        clock_(0),
+        stats_(),
+        relative_estimator_stats_(),
+        latest_estimate_bps_(-1),
+        estimator_(config.estimator_factory->Create(
+            this, &clock_, kRemoteBitrateEstimatorMinBitrateBps)),
+        relative_estimator_(NULL),
+        baseline_(BaseLineFileInterface::Create(test_name + "_" + debug_name_,
+                                                config.update_baseline)) {
+    assert(estimator_.get());
+    assert(baseline_.get());
+    // Default RTT in RemoteRateControl is 200 ms; 50 ms is more realistic.
+    estimator_->OnRttUpdate(50);
+  }
+
+  void SetRelativeEstimator(TestedEstimator* relative_estimator) {
+    relative_estimator_ = relative_estimator;
+  }
+
+  void EatPacket(const Packet& packet) {
+    BWE_TEST_LOGGING_CONTEXT(debug_name_);
+
+    latest_estimate_bps_ = -1;
+
+    // We're treating the send time (from previous filter) as the arrival
+    // time once the packet reaches the estimator.
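// Illustration (not part of the patch): the estimator API below works in
// milliseconds while the framework stamps packets in microseconds, so
// EatPacket() rounds to the nearest millisecond. For example, a
// send_time_us() of 1234567 becomes (1234567 + 500) / 1000 = 1235 ms.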
+ int64_t packet_time_ms = (packet.send_time_us() + 500) / 1000; + BWE_TEST_LOGGING_TIME(packet_time_ms); + BWE_TEST_LOGGING_PLOT("Delay_#2", clock_.TimeInMilliseconds(), + packet_time_ms - + (packet.creation_time_us() + 500) / 1000); + + int64_t step_ms = estimator_->TimeUntilNextProcess(); + while ((clock_.TimeInMilliseconds() + step_ms) < packet_time_ms) { + clock_.AdvanceTimeMilliseconds(step_ms); + estimator_->Process(); + step_ms = estimator_->TimeUntilNextProcess(); + } + estimator_->IncomingPacket(packet_time_ms, packet.payload_size(), + packet.header()); + clock_.AdvanceTimeMilliseconds(packet_time_ms - + clock_.TimeInMilliseconds()); + ASSERT_TRUE(packet_time_ms == clock_.TimeInMilliseconds()); + } + + bool CheckEstimate(PacketSender::Feedback* feedback) { + assert(feedback); + BWE_TEST_LOGGING_CONTEXT(debug_name_); + uint32_t estimated_bps = 0; + if (LatestEstimate(&estimated_bps)) { + feedback->estimated_bps = estimated_bps; + baseline_->Estimate(clock_.TimeInMilliseconds(), estimated_bps); + + double estimated_kbps = static_cast(estimated_bps) / 1000.0; + stats_.Push(estimated_kbps); + BWE_TEST_LOGGING_PLOT("Estimate_#1", clock_.TimeInMilliseconds(), + estimated_kbps); + uint32_t relative_estimate_bps = 0; + if (relative_estimator_ && + relative_estimator_->LatestEstimate(&relative_estimate_bps)) { + double relative_estimate_kbps = + static_cast(relative_estimate_bps) / 1000.0; + relative_estimator_stats_.Push(estimated_kbps - relative_estimate_kbps); + } + return true; + } + return false; + } + + void LogStats() { + BWE_TEST_LOGGING_CONTEXT(debug_name_); + BWE_TEST_LOGGING_CONTEXT("Mean"); + stats_.Log("kbps"); + if (relative_estimator_) { + BWE_TEST_LOGGING_CONTEXT("Diff"); + relative_estimator_stats_.Log("kbps"); + } + } + + void VerifyOrWriteBaseline() { + EXPECT_TRUE(baseline_->VerifyOrWrite()); + } + + virtual void OnReceiveBitrateChanged(const vector& ssrcs, + unsigned int bitrate) { + } + + private: + bool LatestEstimate(uint32_t* estimate_bps) { + if (latest_estimate_bps_ < 0) { + vector ssrcs; + unsigned int bps = 0; + if (!estimator_->LatestEstimate(&ssrcs, &bps)) { + return false; + } + latest_estimate_bps_ = bps; + } + *estimate_bps = latest_estimate_bps_; + return true; + } + + string debug_name_; + SimulatedClock clock_; + Stats stats_; + Stats relative_estimator_stats_; + int64_t latest_estimate_bps_; + scoped_ptr estimator_; + TestedEstimator* relative_estimator_; + scoped_ptr baseline_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(TestedEstimator); +}; + +BweTest::BweTest() + : run_time_ms_(0), + simulation_interval_ms_(-1), + previous_packets_(), + packet_senders_(), + estimators_(), + processors_() { +} + +BweTest::~BweTest() { + stl_helpers::DeleteElements(&estimators_); + stl_helpers::DeleteElements(&packet_senders_); +} + +void BweTest::SetUp() { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + string test_name = + string(test_info->test_case_name()) + "_" + string(test_info->name()); + BWE_TEST_LOGGING_GLOBAL_CONTEXT(test_name); + + const BweTestConfig& config = GetParam(); + + uint32_t total_capacity = 0; + for (vector::const_iterator it = + config.sender_factories.begin(); it != config.sender_factories.end(); + ++it) { + PacketSender* sender = (*it)->Create(); + assert(sender); + total_capacity += sender->GetCapacityKbps(); + packet_senders_.push_back(sender); + processors_.push_back(sender); + } + BWE_TEST_LOGGING_LOG1("RequiredLinkCapacity", "%d kbps", total_capacity) + + // Set simulation interval 
from first packet sender. + if (packet_senders_.size() > 0) { + simulation_interval_ms_ = packet_senders_[0]->GetFeedbackIntervalMs(); + } + + for (vector:: const_iterator it = + config.estimator_configs.begin(); it != config.estimator_configs.end(); + ++it) { + estimators_.push_back(new TestedEstimator(test_name, *it)); + } + if (estimators_.size() > 1) { + // Set all estimators as relative to the first one. + for (uint32_t i = 1; i < estimators_.size(); ++i) { + estimators_[i]->SetRelativeEstimator(estimators_[0]); + } + } + + BWE_TEST_LOGGING_GLOBAL_ENABLE(false); +} + +void BweTest::TearDown() { + BWE_TEST_LOGGING_GLOBAL_ENABLE(true); + + for (vector::iterator eit = estimators_.begin(); + eit != estimators_.end(); ++eit) { + (*eit)->VerifyOrWriteBaseline(); + (*eit)->LogStats(); + } + + BWE_TEST_LOGGING_GLOBAL_CONTEXT(""); +} + +void BweTest::AddPacketProcessor( + PacketProcessor* processor) { + assert(processor); + processors_.push_back(processor); +} + +void BweTest::RemovePacketProcessor( + PacketProcessor* processor) { + vector::iterator it = + std::find(processors_.begin(), processors_.end(), processor); + assert(it != processors_.end()); + processors_.erase(it); +} + +void BweTest::VerboseLogging(bool enable) { + BWE_TEST_LOGGING_GLOBAL_ENABLE(enable); +} + +void BweTest::RunFor(int64_t time_ms) { + for (run_time_ms_ += time_ms; run_time_ms_ >= simulation_interval_ms_; + run_time_ms_ -= simulation_interval_ms_) { + Packets packets; + for (vector::const_iterator it = + processors_.begin(); it != processors_.end(); ++it) { + (*it)->RunFor(simulation_interval_ms_, &packets); + (*it)->Plot((packets.back().send_time_us() + 500) / 1000); + } + + // Verify packets are in order between batches. + if (!packets.empty() && !previous_packets_.empty()) { + packets.splice(packets.begin(), previous_packets_, + --previous_packets_.end()); + ASSERT_TRUE(IsTimeSorted(packets)); + packets.erase(packets.begin()); + } else { + ASSERT_TRUE(IsTimeSorted(packets)); + } + + for (PacketsConstIt pit = packets.begin(); pit != packets.end(); ++pit) { + for (vector::iterator eit = estimators_.begin(); + eit != estimators_.end(); ++eit) { + (*eit)->EatPacket(*pit); + } + } + + previous_packets_.swap(packets); + + for (vector::iterator eit = estimators_.begin(); + eit != estimators_.end(); ++eit) { + PacketSender::Feedback feedback = {0}; + if ((*eit)->CheckEstimate(&feedback)) { + for (vector::iterator psit = packet_senders_.begin(); + psit != packet_senders_.end(); ++psit) { + (*psit)->GiveFeedback(feedback); + } + } + } + } +} +} // namespace bwe +} // namespace testing +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test.h new file mode 100644 index 000000000000..286720b0c2dc --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test.h @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include <string>
+#include <vector>
+#include "gtest/gtest.h"
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h"
+#include "webrtc/system_wrappers/interface/constructor_magic.h"
+
+namespace webrtc {
+
+struct RemoteBitrateEstimatorFactory;
+
+namespace testing {
+namespace bwe {
+
+struct BweTestConfig {
+  struct EstimatorConfig {
+    EstimatorConfig()
+        : debug_name(),
+          estimator_factory(NULL),
+          update_baseline(false) {
+    }
+    EstimatorConfig(std::string debug_name,
+                    const RemoteBitrateEstimatorFactory* estimator_factory)
+        : debug_name(debug_name),
+          estimator_factory(estimator_factory),
+          update_baseline(false) {
+    }
+    EstimatorConfig(std::string debug_name,
+                    const RemoteBitrateEstimatorFactory* estimator_factory,
+                    bool update_baseline)
+        : debug_name(debug_name),
+          estimator_factory(estimator_factory),
+          update_baseline(update_baseline) {
+    }
+    std::string debug_name;
+    const RemoteBitrateEstimatorFactory* estimator_factory;
+    bool update_baseline;
+  };
+
+  std::vector<const PacketSenderFactory*> sender_factories;
+  std::vector<EstimatorConfig> estimator_configs;
+};
+
+class BweTest : public ::testing::TestWithParam<BweTestConfig>,
+    public PacketProcessorListener {
+ public:
+  BweTest();
+  virtual ~BweTest();
+
+  virtual void SetUp();
+  virtual void TearDown();
+  virtual void AddPacketProcessor(PacketProcessor* processor);
+  virtual void RemovePacketProcessor(PacketProcessor* processor);
+
+ protected:
+  void VerboseLogging(bool enable);
+  void RunFor(int64_t time_ms);
+
+ private:
+  class TestedEstimator;
+
+  int64_t run_time_ms_;
+  int64_t simulation_interval_ms_;
+  Packets previous_packets_;
+  std::vector<PacketSender*> packet_senders_;
+  std::vector<TestedEstimator*> estimators_;
+  std::vector<PacketProcessor*> processors_;
+
+  DISALLOW_COPY_AND_ASSIGN(BweTest);
+};
+}  // namespace bwe
+}  // namespace testing
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.cc
new file mode 100644
index 000000000000..30723be99979
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h"
+
+#include <stdio.h>
+#include <algorithm>
+#include <vector>
+
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h"
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
+#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+namespace testing {
+namespace bwe {
+
+// The format of BWE test baseline files is extremely simple:
+// 1. All read/written entities are 32-bit unsigned integers in network byte
+//    order (Big Endian).
+// 2. Files begin with a two-word header containing a magic marker and a file
+//    format version indicator. The magic marker reads "BWE!" in a hex dump.
+// 3. Each estimate is logged as a pair of words: time in milliseconds and
+//    estimated bit rate, in bits per second.
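// Illustration (not part of the patch): given this format, a minimal baseline
// file holding a single estimate of 100000 bps taken at t = 1000 ms would be
// the four big-endian words below.
//
//   42 57 45 21   magic marker ("BWE!")
//   00 00 00 01   file format version 1
//   00 00 03 E8   time: 1000 ms
//   00 01 86 A0   estimate: 100000 bps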
+const uint32_t kMagicMarker = 0x42574521; +const uint32_t kFileVersion1 = 0x00000001; +const char kResourceSubDir[] = "remote_bitrate_estimator"; + +class BaseLineFileVerify : public BaseLineFileInterface { + public: + // If |allow_missing_file| is set, VerifyOrWrite() will return true even if + // the baseline file is missing. This is the default when verifying files, but + // not when updating (i.e. we always write it out if missing). + BaseLineFileVerify(const std::string& filepath, bool allow_missing_file) + : reader_(), + fail_to_read_response_(false) { + scoped_ptr reader; + reader.reset(ResourceFileReader::Create(filepath, "bin")); + if (!reader.get()) { + printf("WARNING: Missing baseline file for BWE test: %s.bin\n", + filepath.c_str()); + fail_to_read_response_ = allow_missing_file; + } else { + uint32_t magic_marker = 0; + uint32_t file_version = 0; + if (reader->Read(&magic_marker) && magic_marker == kMagicMarker && + reader->Read(&file_version) && file_version == kFileVersion1) { + reader_.swap(reader); + } else { + printf("WARNING: Bad baseline file header for BWE test: %s.bin\n", + filepath.c_str()); + } + } + } + virtual ~BaseLineFileVerify() {} + + virtual void Estimate(int64_t time_ms, uint32_t estimate_bps) { + if (reader_.get()) { + uint32_t read_ms = 0; + uint32_t read_bps = 0; + if (reader_->Read(&read_ms) && read_ms == time_ms && + reader_->Read(&read_bps) && read_bps == estimate_bps) { + } else { + printf("ERROR: Baseline differs starting at: %d ms (%d vs %d)!\n", + static_cast(time_ms), estimate_bps, read_bps); + reader_.reset(NULL); + } + } + } + + virtual bool VerifyOrWrite() { + if (reader_.get()) { + if (reader_->IsAtEnd()) { + return true; + } else { + printf("ERROR: Baseline file contains more data!\n"); + return false; + } + } + return fail_to_read_response_; + } + + private: + scoped_ptr reader_; + bool fail_to_read_response_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(BaseLineFileVerify); +}; + +class BaseLineFileUpdate : public BaseLineFileInterface { + public: + BaseLineFileUpdate(const std::string& filepath, + BaseLineFileInterface* verifier) + : verifier_(verifier), + output_content_(), + filepath_(filepath) { + output_content_.push_back(kMagicMarker); + output_content_.push_back(kFileVersion1); + } + virtual ~BaseLineFileUpdate() {} + + virtual void Estimate(int64_t time_ms, uint32_t estimate_bps) { + verifier_->Estimate(time_ms, estimate_bps); + output_content_.push_back(static_cast(time_ms)); + output_content_.push_back(estimate_bps); + } + + virtual bool VerifyOrWrite() { + if (!verifier_->VerifyOrWrite()) { + std::string dir_path = webrtc::test::OutputPath() + kResourceSubDir; + if (!webrtc::test::CreateDirectory(dir_path)) { + printf("WARNING: Cannot create output dir: %s\n", dir_path.c_str()); + return false; + } + scoped_ptr writer; + writer.reset(OutputFileWriter::Create(filepath_, "bin")); + if (!writer.get()) { + printf("WARNING: Cannot create output file: %s.bin\n", + filepath_.c_str()); + return false; + } + printf("NOTE: Writing baseline file for BWE test: %s.bin\n", + filepath_.c_str()); + for (std::vector::iterator it = output_content_.begin(); + it != output_content_.end(); ++it) { + writer->Write(*it); + } + return true; + } + printf("NOTE: No change, not writing: %s\n", filepath_.c_str()); + return true; + } + + private: + scoped_ptr verifier_; + std::vector output_content_; + std::string filepath_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(BaseLineFileUpdate); +}; + +BaseLineFileInterface* BaseLineFileInterface::Create( + const std::string& 
filename, bool write_output_file) { + std::string filepath = filename; + std::replace(filepath.begin(), filepath.end(), '/', '_'); + filepath = std::string(kResourceSubDir) + "/" + filepath; + + scoped_ptr result; + result.reset(new BaseLineFileVerify(filepath, !write_output_file)); + if (write_output_file) { + // Takes ownership of the |verifier| instance. + result.reset(new BaseLineFileUpdate(filepath, result.release())); + } + return result.release(); +} +} // namespace bwe +} // namespace testing +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h new file mode 100644 index 000000000000..64dfa8553557 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_BASELINEFILE_H_ +#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_BASELINEFILE_H_ + +#include +#include "webrtc/modules/interface/module_common_types.h" + +namespace webrtc { +namespace testing { +namespace bwe { + +class BaseLineFileInterface { + public: + virtual ~BaseLineFileInterface() {} + + // Compare, or log, one estimate against the baseline file. + virtual void Estimate(int64_t time_ms, uint32_t estimate_bps) = 0; + + // Verify whether there are any differences between the logged estimates and + // those read from the baseline file. If updating the baseline file, write out + // new file if there were differences. Return true if logged estimates are + // identical, or if output file was updated successfully. + virtual bool VerifyOrWrite() = 0; + + // Create an instance for either verifying estimates against a baseline file + // with name |filename|, living in the resources/ directory or, if the flag + // |write_updated_file| is set, write logged estimates to a file with the same + // name, living in the out/ directory. + static BaseLineFileInterface* Create(const std::string& filename, + bool write_updated_file); +}; +} // namespace bwe +} // namespace testing +} // namespace webrtc + +#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_BASELINEFILE_H_ diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.cc new file mode 100644 index 000000000000..4e5d4074a9db --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.cc @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h" + +#ifdef WIN32 +#include +#else +#include +#endif +#include + +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/testsupport/fileutils.h" + +namespace webrtc { +namespace testing { +namespace bwe { + +ResourceFileReader::~ResourceFileReader() { + if (file_ != NULL) { + fclose(file_); + file_ = NULL; + } +} + +bool ResourceFileReader::IsAtEnd() { + int32_t current_pos = ftell(file_); + fseek(file_, 0, SEEK_END); + int32_t end_pos = ftell(file_); + fseek(file_, current_pos, SEEK_SET); + return current_pos == end_pos; +} + +bool ResourceFileReader::Read(uint32_t* out) { + assert(out); + uint32_t tmp = 0; + if (fread(&tmp, 1, sizeof(uint32_t), file_) != sizeof(uint32_t)) { + printf("Error reading!\n"); + return false; + } + *out = ntohl(tmp); + return true; +} + +ResourceFileReader* ResourceFileReader::Create(const std::string& filename, + const std::string& extension) { + std::string filepath = webrtc::test::ResourcePath(filename, extension); + FILE* file = fopen(filepath.c_str(), "rb"); + if (file == NULL) { + BWE_TEST_LOGGING_CONTEXT("ResourceFileReader"); + BWE_TEST_LOGGING_LOG1("Create", "Can't read file: %s", filepath.c_str()); + return 0; + } else { + return new ResourceFileReader(file); + } +} + +OutputFileWriter::~OutputFileWriter() { + if (file_ != NULL) { + fclose(file_); + file_ = NULL; + } +} + +bool OutputFileWriter::Write(uint32_t value) { + uint32_t tmp = htonl(value); + if (fwrite(&tmp, 1, sizeof(uint32_t), file_) != sizeof(uint32_t)) { + return false; + } + return true; +} + +OutputFileWriter* OutputFileWriter::Create(const std::string& filename, + const std::string& extension) { + std::string filepath = webrtc::test::OutputPath() + filename + "." + + extension; + FILE* file = fopen(filepath.c_str(), "wb"); + if (file == NULL) { + BWE_TEST_LOGGING_CONTEXT("OutputFileWriter"); + BWE_TEST_LOGGING_LOG1("Create", "Can't write file: %s", filepath.c_str()); + return NULL; + } else { + return new OutputFileWriter(file); + } +} +} // namespace bwe +} // namespace testing +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h new file mode 100644 index 000000000000..397a16977bca --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_FILEUTILS_H_ +#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_FILEUTILS_H_ + +#include +#include + +#include "webrtc/modules/interface/module_common_types.h" +#include "webrtc/system_wrappers/interface/constructor_magic.h" + +namespace webrtc { +namespace testing { +namespace bwe { + +class ResourceFileReader { + public: + ~ResourceFileReader(); + + bool IsAtEnd(); + bool Read(uint32_t* out); + + static ResourceFileReader* Create(const std::string& filename, + const std::string& extension); + + private: + explicit ResourceFileReader(FILE* file) : file_(file) {} + FILE* file_; + DISALLOW_IMPLICIT_CONSTRUCTORS(ResourceFileReader); +}; + +class OutputFileWriter { + public: + ~OutputFileWriter(); + + bool Write(uint32_t value); + + static OutputFileWriter* Create(const std::string& filename, + const std::string& extension); + + private: + explicit OutputFileWriter(FILE* file) : file_(file) {} + FILE* file_; + DISALLOW_IMPLICIT_CONSTRUCTORS(OutputFileWriter); +}; +} // namespace bwe +} // namespace testing +} // namespace webrtc + +#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_FILEUTILS_H_ diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc new file mode 100644 index 000000000000..546ae2c4003f --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc @@ -0,0 +1,489 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h" + +#include +#include + +namespace webrtc { +namespace testing { +namespace bwe { + +class RateCounter { + public: + RateCounter() + : kWindowSizeUs(1000000), + packets_per_second_(0), + bytes_per_second_(0), + last_accumulated_us_(0), + window_() {} + + void UpdateRates(int64_t send_time_us, uint32_t payload_size) { + packets_per_second_++; + bytes_per_second_ += payload_size; + last_accumulated_us_ = send_time_us; + window_.push_back(std::make_pair(send_time_us, payload_size)); + while (!window_.empty()) { + const TimeSizePair& packet = window_.front(); + if (packet.first > (last_accumulated_us_ - kWindowSizeUs)) { + break; + } + assert(packets_per_second_ >= 1); + assert(bytes_per_second_ >= packet.second); + packets_per_second_--; + bytes_per_second_ -= packet.second; + window_.pop_front(); + } + } + + uint32_t bits_per_second() const { + return bytes_per_second_ * 8; + } + uint32_t packets_per_second() const { return packets_per_second_; } + + private: + typedef std::pair TimeSizePair; + + const int64_t kWindowSizeUs; + uint32_t packets_per_second_; + uint32_t bytes_per_second_; + int64_t last_accumulated_us_; + std::list window_; +}; + +Random::Random(uint32_t seed) + : a_(0x531FDB97 ^ seed), + b_(0x6420ECA8 + seed) { +} + +float Random::Rand() { + const float kScale = 1.0f / 0xffffffff; + float result = kScale * b_; + a_ ^= b_; + b_ += a_; + return result; +} + +int Random::Gaussian(int mean, int standard_deviation) { + // Creating a Normal distribution variable from two independent uniform + // variables based on the Box-Muller transform, which is defined on the + // interval (0, 1], hence the mask+add below. + const double kPi = 3.14159265358979323846; + const double kScale = 1.0 / 0x80000000ul; + double u1 = kScale * ((a_ & 0x7ffffffful) + 1); + double u2 = kScale * ((b_ & 0x7ffffffful) + 1); + a_ ^= b_; + b_ += a_; + return static_cast(mean + standard_deviation * + std::sqrt(-2 * std::log(u1)) * std::cos(2 * kPi * u2)); +} + +Packet::Packet() + : creation_time_us_(-1), + send_time_us_(-1), + payload_size_(0) { + memset(&header_, 0, sizeof(header_)); +} + +Packet::Packet(int64_t send_time_us, uint32_t payload_size, + const RTPHeader& header) + : creation_time_us_(send_time_us), + send_time_us_(send_time_us), + payload_size_(payload_size), + header_(header) { +} + +Packet::Packet(int64_t send_time_us, uint32_t sequence_number) + : creation_time_us_(send_time_us), + send_time_us_(send_time_us), + payload_size_(0) { + memset(&header_, 0, sizeof(header_)); + header_.sequenceNumber = sequence_number; +} + +bool Packet::operator<(const Packet& rhs) const { + return send_time_us_ < rhs.send_time_us_; +} + +void Packet::set_send_time_us(int64_t send_time_us) { + assert(send_time_us >= 0); + send_time_us_ = send_time_us; +} + +bool IsTimeSorted(const Packets& packets) { + PacketsConstIt last_it = packets.begin(); + for (PacketsConstIt it = last_it; it != packets.end(); ++it) { + if (it != last_it && *it < *last_it) { + return false; + } + last_it = it; + } + return true; +} + +PacketProcessor::PacketProcessor(PacketProcessorListener* listener) + : listener_(listener) { + if (listener_) { + listener_->AddPacketProcessor(this); + } +} + +PacketProcessor::~PacketProcessor() { + if (listener_) { + listener_->RemovePacketProcessor(this); + } +} + +RateCounterFilter::RateCounterFilter(PacketProcessorListener* listener) + : PacketProcessor(listener), + rate_counter_(new RateCounter()), + pps_stats_(), + 
kbps_stats_(), + name_("") {} + +RateCounterFilter::RateCounterFilter(PacketProcessorListener* listener, + const std::string& name) + : PacketProcessor(listener), + rate_counter_(new RateCounter()), + pps_stats_(), + kbps_stats_(), + name_(name) {} + +RateCounterFilter::~RateCounterFilter() { + LogStats(); +} + +uint32_t RateCounterFilter::packets_per_second() const { + return rate_counter_->packets_per_second(); +} + +uint32_t RateCounterFilter::bits_per_second() const { + return rate_counter_->bits_per_second(); +} + +void RateCounterFilter::LogStats() { + BWE_TEST_LOGGING_CONTEXT("RateCounterFilter"); + pps_stats_.Log("pps"); + kbps_stats_.Log("kbps"); +} + +void RateCounterFilter::Plot(int64_t timestamp_ms) { + BWE_TEST_LOGGING_CONTEXT(name_.c_str()); + BWE_TEST_LOGGING_PLOT("Throughput_#1", timestamp_ms, + rate_counter_->bits_per_second() / 1000.0); +} + +void RateCounterFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) { + assert(in_out); + for (PacketsConstIt it = in_out->begin(); it != in_out->end(); ++it) { + rate_counter_->UpdateRates(it->send_time_us(), it->payload_size()); + } + pps_stats_.Push(rate_counter_->packets_per_second()); + kbps_stats_.Push(rate_counter_->bits_per_second() / 1000.0); +} + +LossFilter::LossFilter(PacketProcessorListener* listener) + : PacketProcessor(listener), + random_(0x12345678), + loss_fraction_(0.0f) { +} + +void LossFilter::SetLoss(float loss_percent) { + BWE_TEST_LOGGING_ENABLE(false); + BWE_TEST_LOGGING_LOG1("Loss", "%f%%", loss_percent); + assert(loss_percent >= 0.0f); + assert(loss_percent <= 100.0f); + loss_fraction_ = loss_percent * 0.01f; +} + +void LossFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) { + assert(in_out); + for (PacketsIt it = in_out->begin(); it != in_out->end(); ) { + if (random_.Rand() < loss_fraction_) { + it = in_out->erase(it); + } else { + ++it; + } + } +} + +DelayFilter::DelayFilter(PacketProcessorListener* listener) + : PacketProcessor(listener), + delay_us_(0), + last_send_time_us_(0) { +} + +void DelayFilter::SetDelay(int64_t delay_ms) { + BWE_TEST_LOGGING_ENABLE(false); + BWE_TEST_LOGGING_LOG1("Delay", "%d ms", static_cast(delay_ms)); + assert(delay_ms >= 0); + delay_us_ = delay_ms * 1000; +} + +void DelayFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) { + assert(in_out); + for (PacketsIt it = in_out->begin(); it != in_out->end(); ++it) { + int64_t new_send_time_us = it->send_time_us() + delay_us_; + last_send_time_us_ = std::max(last_send_time_us_, new_send_time_us); + it->set_send_time_us(last_send_time_us_); + } +} + +JitterFilter::JitterFilter(PacketProcessorListener* listener) + : PacketProcessor(listener), + random_(0x89674523), + stddev_jitter_us_(0), + last_send_time_us_(0) { +} + +void JitterFilter::SetJitter(int64_t stddev_jitter_ms) { + BWE_TEST_LOGGING_ENABLE(false); + BWE_TEST_LOGGING_LOG1("Jitter", "%d ms", + static_cast(stddev_jitter_ms)); + assert(stddev_jitter_ms >= 0); + stddev_jitter_us_ = stddev_jitter_ms * 1000; +} + +void JitterFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) { + assert(in_out); + for (PacketsIt it = in_out->begin(); it != in_out->end(); ++it) { + int64_t new_send_time_us = it->send_time_us(); + new_send_time_us += random_.Gaussian(0, stddev_jitter_us_); + last_send_time_us_ = std::max(last_send_time_us_, new_send_time_us); + it->set_send_time_us(last_send_time_us_); + } +} + +ReorderFilter::ReorderFilter(PacketProcessorListener* listener) + : PacketProcessor(listener), + random_(0x27452389), + reorder_fraction_(0.0f) { +} + +void 
ReorderFilter::SetReorder(float reorder_percent) { + BWE_TEST_LOGGING_ENABLE(false); + BWE_TEST_LOGGING_LOG1("Reordering", "%f%%", reorder_percent); + assert(reorder_percent >= 0.0f); + assert(reorder_percent <= 100.0f); + reorder_fraction_ = reorder_percent * 0.01f; +} + +void ReorderFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) { + assert(in_out); + if (in_out->size() >= 2) { + PacketsIt last_it = in_out->begin(); + PacketsIt it = last_it; + while (++it != in_out->end()) { + if (random_.Rand() < reorder_fraction_) { + int64_t t1 = last_it->send_time_us(); + int64_t t2 = it->send_time_us(); + std::swap(*last_it, *it); + last_it->set_send_time_us(t1); + it->set_send_time_us(t2); + } + last_it = it; + } + } +} + +ChokeFilter::ChokeFilter(PacketProcessorListener* listener) + : PacketProcessor(listener), + kbps_(1200), + max_delay_us_(0), + last_send_time_us_(0) { +} + +void ChokeFilter::SetCapacity(uint32_t kbps) { + BWE_TEST_LOGGING_ENABLE(false); + BWE_TEST_LOGGING_LOG1("BitrateChoke", "%d kbps", kbps); + kbps_ = kbps; +} + +void ChokeFilter::SetMaxDelay(int64_t max_delay_ms) { + BWE_TEST_LOGGING_ENABLE(false); + BWE_TEST_LOGGING_LOG1("Max Delay", "%d ms", static_cast(max_delay_ms)); + assert(max_delay_ms >= 0); + max_delay_us_ = max_delay_ms * 1000; +} + +void ChokeFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) { + assert(in_out); + for (PacketsIt it = in_out->begin(); it != in_out->end(); ) { + int64_t earliest_send_time_us = last_send_time_us_ + + (it->payload_size() * 8 * 1000 + kbps_ / 2) / kbps_; + int64_t new_send_time_us = std::max(it->send_time_us(), + earliest_send_time_us); + if (max_delay_us_ == 0 || + max_delay_us_ >= (new_send_time_us - it->send_time_us())) { + it->set_send_time_us(new_send_time_us); + last_send_time_us_ = new_send_time_us; + ++it; + } else { + it = in_out->erase(it); + } + } +} + +TraceBasedDeliveryFilter::TraceBasedDeliveryFilter( + PacketProcessorListener* listener) + : PacketProcessor(listener), + delivery_times_us_(), + next_delivery_it_(), + local_time_us_(-1), + rate_counter_(new RateCounter), + name_("") {} + +TraceBasedDeliveryFilter::TraceBasedDeliveryFilter( + PacketProcessorListener* listener, + const std::string& name) + : PacketProcessor(listener), + delivery_times_us_(), + next_delivery_it_(), + local_time_us_(-1), + rate_counter_(new RateCounter), + name_(name) {} + +TraceBasedDeliveryFilter::~TraceBasedDeliveryFilter() { +} + +bool TraceBasedDeliveryFilter::Init(const std::string& filename) { + FILE* trace_file = fopen(filename.c_str(), "r"); + if (!trace_file) { + return false; + } + int64_t first_timestamp = -1; + while(!feof(trace_file)) { + const size_t kMaxLineLength = 100; + char line[kMaxLineLength]; + if (fgets(line, kMaxLineLength, trace_file)) { + std::string line_string(line); + std::istringstream buffer(line_string); + int64_t timestamp; + buffer >> timestamp; + timestamp /= 1000; // Convert to microseconds. 
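// Illustration (not part of the patch): for the example trace documented in
// bwe_test_framework.h ("100000000\n125000000\n321000000\n..."), the
// nanosecond stamps become 100000, 125000 and 321000 us after the division,
// and subtracting first_timestamp stores them as offsets 0, 25000 and
// 221000 us.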
+      if (first_timestamp == -1)
+        first_timestamp = timestamp;
+      assert(delivery_times_us_.empty() ||
+             timestamp - first_timestamp - delivery_times_us_.back() >= 0);
+      delivery_times_us_.push_back(timestamp - first_timestamp);
+    }
+  }
+  assert(!delivery_times_us_.empty());
+  next_delivery_it_ = delivery_times_us_.begin();
+  fclose(trace_file);
+  return true;
+}
+
+void TraceBasedDeliveryFilter::Plot(int64_t timestamp_ms) {
+  BWE_TEST_LOGGING_CONTEXT(name_.c_str());
+  // This plots the max possible throughput of the trace-based delivery
+  // filter, which will be reached if a packet is sent on every packet slot
+  // of the trace.
+  BWE_TEST_LOGGING_PLOT("MaxThroughput_#1", timestamp_ms,
+                        rate_counter_->bits_per_second() / 1000.0);
+}
+
+void TraceBasedDeliveryFilter::RunFor(int64_t time_ms, Packets* in_out) {
+  assert(in_out);
+  for (PacketsIt it = in_out->begin(); it != in_out->end(); ++it) {
+    do {
+      ProceedToNextSlot();
+      const int kPayloadSize = 1240;
+      rate_counter_->UpdateRates(local_time_us_, kPayloadSize);
+    } while (local_time_us_ < it->send_time_us());
+    it->set_send_time_us(local_time_us_);
+  }
+}
+
+void TraceBasedDeliveryFilter::ProceedToNextSlot() {
+  if (*next_delivery_it_ <= local_time_us_) {
+    ++next_delivery_it_;
+    if (next_delivery_it_ == delivery_times_us_.end()) {
+      // When the trace wraps we allow two packets to be sent back-to-back.
+      for (TimeList::iterator it = delivery_times_us_.begin();
+           it != delivery_times_us_.end(); ++it) {
+        *it += local_time_us_;
+      }
+      next_delivery_it_ = delivery_times_us_.begin();
+    }
+  }
+  local_time_us_ = *next_delivery_it_;
+}
+
+PacketSender::PacketSender(PacketProcessorListener* listener)
+    : PacketProcessor(listener) {
+}
+
+VideoSender::VideoSender(PacketProcessorListener* listener, float fps,
+                         uint32_t kbps, uint32_t ssrc, float first_frame_offset)
+    : PacketSender(listener),
+      kMaxPayloadSizeBytes(1000),
+      kTimestampBase(0xff80ff00ul),
+      frame_period_ms_(1000.0 / fps),
+      bytes_per_second_((1000 * kbps) / 8),
+      frame_size_bytes_(bytes_per_second_ / fps),
+      next_frame_ms_(frame_period_ms_ * first_frame_offset),
+      now_ms_(0.0),
+      prototype_header_() {
+  assert(first_frame_offset >= 0.0f);
+  assert(first_frame_offset < 1.0f);
+  memset(&prototype_header_, 0, sizeof(prototype_header_));
+  prototype_header_.ssrc = ssrc;
+  prototype_header_.sequenceNumber = 0xf000u;
+}
+
+uint32_t VideoSender::GetCapacityKbps() const {
+  return (bytes_per_second_ * 8) / 1000;
+}
+
+void VideoSender::RunFor(int64_t time_ms, Packets* in_out) {
+  assert(in_out);
+  now_ms_ += time_ms;
+  Packets newPackets;
+  while (now_ms_ >= next_frame_ms_) {
+    prototype_header_.sequenceNumber++;
+    prototype_header_.timestamp = kTimestampBase +
+        static_cast<uint32_t>(next_frame_ms_ * 90.0);
+    prototype_header_.extension.absoluteSendTime = (kTimestampBase +
+        ((static_cast<int64_t>(next_frame_ms_ * (1 << 18)) + 500) / 1000)) &
+        0x00fffffful;
+    prototype_header_.extension.transmissionTimeOffset = 0;
+
+    // Generate new packets for this frame, all with the same timestamp,
+    // but the payload size is capped, so if the whole frame doesn't fit in
+    // one packet, we will see a number of equally sized packets followed by
+    // one smaller at the tail.
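// Illustration (not part of the patch): with the factory configuration
// VideoPacketSenderFactory(15.00f, 500, ...) used in
// remote_bitrate_estimators_test.cc, bytes_per_second_ = (1000 * 500) / 8 =
// 62500 and frame_size_bytes_ = 62500 / 15 = 4166, so each frame below is
// split into four 1000-byte packets plus a 166-byte packet at the tail.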
+    int64_t send_time_us = next_frame_ms_ * 1000.0;
+    uint32_t payload_size = frame_size_bytes_;
+    while (payload_size > 0) {
+      uint32_t size = std::min(kMaxPayloadSizeBytes, payload_size);
+      newPackets.push_back(Packet(send_time_us, size, prototype_header_));
+      payload_size -= size;
+    }
+
+    next_frame_ms_ += frame_period_ms_;
+  }
+  in_out->merge(newPackets);
+}
+
+AdaptiveVideoSender::AdaptiveVideoSender(PacketProcessorListener* listener,
+                                         float fps,
+                                         uint32_t kbps,
+                                         uint32_t ssrc,
+                                         float first_frame_offset)
+    : VideoSender(listener, fps, kbps, ssrc, first_frame_offset) {}
+
+void AdaptiveVideoSender::GiveFeedback(const PacketSender::Feedback& feedback) {
+  bytes_per_second_ = feedback.estimated_bps / 8;
+  frame_size_bytes_ = (bytes_per_second_ * frame_period_ms_ + 500) / 1000;
+}
+}  // namespace bwe
+}  // namespace testing
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
new file mode 100644
index 000000000000..ddddd7499ba0
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
@@ -0,0 +1,429 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_FRAMEWORK_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_FRAMEWORK_H_
+
+#include <assert.h>
+#include <algorithm>
+#include <cmath>
+#include <list>
+#include <numeric>
+#include <string>
+#include <vector>
+
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+namespace testing {
+namespace bwe {
+
+class RateCounter;
+
+template<typename T> class Stats {
+ public:
+  Stats()
+      : data_(),
+        last_mean_count_(0),
+        last_variance_count_(0),
+        last_minmax_count_(0),
+        mean_(0),
+        variance_(0),
+        min_(0),
+        max_(0) {
+  }
+
+  void Push(T data_point) {
+    data_.push_back(data_point);
+  }
+
+  T GetMean() {
+    if (last_mean_count_ != data_.size()) {
+      last_mean_count_ = data_.size();
+      mean_ = std::accumulate(data_.begin(), data_.end(), static_cast<T>(0));
+      assert(last_mean_count_ != 0);
+      mean_ /= static_cast<T>(last_mean_count_);
+    }
+    return mean_;
+  }
+  T GetVariance() {
+    if (last_variance_count_ != data_.size()) {
+      last_variance_count_ = data_.size();
+      T mean = GetMean();
+      variance_ = 0;
+      for (typename std::vector<T>::const_iterator it = data_.begin();
+           it != data_.end(); ++it) {
+        T diff = (*it - mean);
+        variance_ += diff * diff;
+      }
+      assert(last_variance_count_ != 0);
+      variance_ /= static_cast<T>(last_variance_count_);
+    }
+    return variance_;
+  }
+  T GetStdDev() {
+    return std::sqrt(static_cast<double>(GetVariance()));
+  }
+  T GetMin() {
+    RefreshMinMax();
+    return min_;
+  }
+  T GetMax() {
+    RefreshMinMax();
+    return max_;
+  }
+
+  void Log(const std::string& units) {
+    BWE_TEST_LOGGING_LOG5("", "%f %s\t+/-%f\t[%f,%f]",
+        GetMean(), units.c_str(), GetStdDev(), GetMin(), GetMax());
+  }
+
+ private:
+  void RefreshMinMax() {
+    if (last_minmax_count_ != data_.size()) {
+      last_minmax_count_ = data_.size();
+      min_ = max_ = 0;
+      if (data_.empty()) {
+        return;
+      }
+      typename std::vector<T>::const_iterator it = data_.begin();
+      min_ = max_ = *it;
+      while (++it != data_.end()) {
+        min_ = std::min(min_, *it);
+        max_ = std::max(max_, *it);
+      }
+    }
+  }
+
+  std::vector<T> data_;
+  typename std::vector<T>::size_type last_mean_count_;
+  typename std::vector<T>::size_type last_variance_count_;
+  typename std::vector<T>::size_type last_minmax_count_;
+  T mean_;
+  T variance_;
+  T min_;
+  T max_;
+};
+
+class Random {
+ public:
+  explicit Random(uint32_t seed);
+
+  // Returns a pseudo-random number in the interval [0.0, 1.0].
+  float Rand();
+
+  // Normal distribution.
+  int Gaussian(int mean, int standard_deviation);
+
+  // TODO(solenberg): Random from histogram.
+  // template<typename T> int Distribution(const std::vector<T> histogram) {
+
+ private:
+  uint32_t a_;
+  uint32_t b_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(Random);
+};
+
+class Packet {
+ public:
+  Packet();
+  Packet(int64_t send_time_us, uint32_t payload_size,
+         const RTPHeader& header);
+  Packet(int64_t send_time_us, uint32_t sequence_number);
+
+  bool operator<(const Packet& rhs) const;
+
+  int64_t creation_time_us() const { return creation_time_us_; }
+  void set_send_time_us(int64_t send_time_us);
+  int64_t send_time_us() const { return send_time_us_; }
+  uint32_t payload_size() const { return payload_size_; }
+  const RTPHeader& header() const { return header_; }
+
+ private:
+  int64_t creation_time_us_;  // Time when the packet was created.
+  int64_t send_time_us_;   // Time the packet left last processor touching it.
+  uint32_t payload_size_;  // Size of the (non-existent, simulated) payload.
+  RTPHeader header_;       // Actual contents.
+};
+
+typedef std::list<Packet> Packets;
+typedef std::list<Packet>::iterator PacketsIt;
+typedef std::list<Packet>::const_iterator PacketsConstIt;
+
+bool IsTimeSorted(const Packets& packets);
+
+class PacketProcessor;
+
+class PacketProcessorListener {
+ public:
+  virtual ~PacketProcessorListener() {}
+
+  virtual void AddPacketProcessor(PacketProcessor* processor) = 0;
+  virtual void RemovePacketProcessor(PacketProcessor* processor) = 0;
+};
+
+class PacketProcessor {
+ public:
+  explicit PacketProcessor(PacketProcessorListener* listener);
+  virtual ~PacketProcessor();
+
+  // Called after each simulation batch to allow the processor to plot any
+  // internal data.
+  virtual void Plot(int64_t timestamp_ms) {}
+
+  // Run simulation for |time_ms| milliseconds, consuming packets from, and
+  // producing packets into, in_out. The outgoing packet list must be sorted
+  // on |send_time_us_|. Use of the simulation time |time_ms| is optional.
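// Illustration (not part of the patch): e.g. a DelayFilter configured with
// SetDelay(5) that receives packets stamped 10000 us and 20000 us must
// forward them stamped 15000 us and 25000 us, keeping the list time-sorted.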
+ virtual void RunFor(int64_t time_ms, Packets* in_out) = 0; + + private: + PacketProcessorListener* listener_; + + DISALLOW_COPY_AND_ASSIGN(PacketProcessor); +}; + +class RateCounterFilter : public PacketProcessor { + public: + explicit RateCounterFilter(PacketProcessorListener* listener); + RateCounterFilter(PacketProcessorListener* listener, + const std::string& name); + virtual ~RateCounterFilter(); + + uint32_t packets_per_second() const; + uint32_t bits_per_second() const; + + void LogStats(); + virtual void Plot(int64_t timestamp_ms); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + scoped_ptr rate_counter_; + Stats pps_stats_; + Stats kbps_stats_; + std::string name_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(RateCounterFilter); +}; + +class LossFilter : public PacketProcessor { + public: + explicit LossFilter(PacketProcessorListener* listener); + virtual ~LossFilter() {} + + void SetLoss(float loss_percent); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + Random random_; + float loss_fraction_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(LossFilter); +}; + +class DelayFilter : public PacketProcessor { + public: + explicit DelayFilter(PacketProcessorListener* listener); + virtual ~DelayFilter() {} + + void SetDelay(int64_t delay_ms); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + int64_t delay_us_; + int64_t last_send_time_us_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(DelayFilter); +}; + +class JitterFilter : public PacketProcessor { + public: + explicit JitterFilter(PacketProcessorListener* listener); + virtual ~JitterFilter() {} + + void SetJitter(int64_t stddev_jitter_ms); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + Random random_; + int64_t stddev_jitter_us_; + int64_t last_send_time_us_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(JitterFilter); +}; + +class ReorderFilter : public PacketProcessor { + public: + explicit ReorderFilter(PacketProcessorListener* listener); + virtual ~ReorderFilter() {} + + void SetReorder(float reorder_percent); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + Random random_; + float reorder_fraction_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(ReorderFilter); +}; + +// Apply a bitrate choke with an infinite queue on the packet stream. +class ChokeFilter : public PacketProcessor { + public: + explicit ChokeFilter(PacketProcessorListener* listener); + virtual ~ChokeFilter() {} + + void SetCapacity(uint32_t kbps); + void SetMaxDelay(int64_t max_delay_ms); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + uint32_t kbps_; + int64_t max_delay_us_; + int64_t last_send_time_us_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(ChokeFilter); +}; + +class TraceBasedDeliveryFilter : public PacketProcessor { + public: + explicit TraceBasedDeliveryFilter(PacketProcessorListener* listener); + TraceBasedDeliveryFilter(PacketProcessorListener* listener, + const std::string& name); + virtual ~TraceBasedDeliveryFilter(); + + // The file should contain nanosecond timestamps corresponding to the time + // when the network can accept another packet. The timestamps should be + // separated by new lines, e.g., "100000000\n125000000\n321000000\n..." 
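// Illustration (not part of the patch): that example trace allows deliveries
// at 100 ms, 125 ms and 321 ms, i.e. inter-packet gaps of 25 ms and 196 ms
// once Init() has normalized the nanosecond stamps.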
+ bool Init(const std::string& filename); + virtual void Plot(int64_t timestamp_ms); + virtual void RunFor(int64_t time_ms, Packets* in_out); + + private: + void ProceedToNextSlot(); + + typedef std::vector<int64_t> TimeList; + TimeList delivery_times_us_; + TimeList::const_iterator next_delivery_it_; + int64_t local_time_us_; + scoped_ptr<RateCounter> rate_counter_; + std::string name_; + + DISALLOW_COPY_AND_ASSIGN(TraceBasedDeliveryFilter); +}; + +class PacketSender : public PacketProcessor { + public: + struct Feedback { + uint32_t estimated_bps; + }; + + explicit PacketSender(PacketProcessorListener* listener); + virtual ~PacketSender() {} + + virtual uint32_t GetCapacityKbps() const { return 0; } + + // Call GiveFeedback() with the returned interval in milliseconds, provided + // there is a new estimate available. + // Note that changing the feedback interval affects the timing of when the + // output of the estimators is sampled and therefore the baseline files may + // have to be regenerated. + virtual int64_t GetFeedbackIntervalMs() const { return 1000; } + virtual void GiveFeedback(const Feedback& feedback) {} + + private: + DISALLOW_COPY_AND_ASSIGN(PacketSender); +}; + +struct PacketSenderFactory { + PacketSenderFactory() {} + virtual ~PacketSenderFactory() {} + virtual PacketSender* Create() const = 0; +}; + +class VideoSender : public PacketSender { + public: + VideoSender(PacketProcessorListener* listener, float fps, uint32_t kbps, + uint32_t ssrc, float first_frame_offset); + virtual ~VideoSender() {} + + uint32_t max_payload_size_bytes() const { return kMaxPayloadSizeBytes; } + uint32_t bytes_per_second() const { return bytes_per_second_; } + + virtual uint32_t GetCapacityKbps() const; + + virtual void RunFor(int64_t time_ms, Packets* in_out); + + protected: + const uint32_t kMaxPayloadSizeBytes; + const uint32_t kTimestampBase; + const double frame_period_ms_; + uint32_t bytes_per_second_; + uint32_t frame_size_bytes_; + + private: + double next_frame_ms_; + double now_ms_; + RTPHeader prototype_header_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(VideoSender); +}; + +class AdaptiveVideoSender : public VideoSender { + public: + AdaptiveVideoSender(PacketProcessorListener* listener, float fps, + uint32_t kbps, uint32_t ssrc, float first_frame_offset); + virtual ~AdaptiveVideoSender() {} + + virtual int64_t GetFeedbackIntervalMs() const { return 100; } + virtual void GiveFeedback(const Feedback& feedback); + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(AdaptiveVideoSender); +}; + +class VideoPacketSenderFactory : public PacketSenderFactory { + public: + VideoPacketSenderFactory(float fps, uint32_t kbps, uint32_t ssrc, + float frame_offset) + : fps_(fps), + kbps_(kbps), + ssrc_(ssrc), + frame_offset_(frame_offset) { + } + virtual ~VideoPacketSenderFactory() {} + virtual PacketSender* Create() const { + return new VideoSender(NULL, fps_, kbps_, ssrc_, frame_offset_); + } + protected: + float fps_; + uint32_t kbps_; + uint32_t ssrc_; + float frame_offset_; +}; + +class AdaptiveVideoPacketSenderFactory : public VideoPacketSenderFactory { + public: + AdaptiveVideoPacketSenderFactory(float fps, uint32_t kbps, uint32_t ssrc, + float frame_offset) + : VideoPacketSenderFactory(fps, kbps, ssrc, frame_offset) {} + virtual ~AdaptiveVideoPacketSenderFactory() {} + virtual PacketSender* Create() const { + return new AdaptiveVideoSender(NULL, fps_, kbps_, ssrc_, frame_offset_); + } +}; + +} // namespace bwe +} // namespace testing +} // namespace webrtc + +#endif //
WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_FRAMEWORK_H_ diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc new file mode 100644 index 000000000000..1ef421ac43d9 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc @@ -0,0 +1,909 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h" + +#include <cmath> + +#include "gtest/gtest.h" +#include "webrtc/system_wrappers/interface/constructor_magic.h" +#include "webrtc/test/testsupport/fileutils.h" + +using std::vector; + +namespace webrtc { +namespace testing { +namespace bwe { + +TEST(BweTestFramework_RandomTest, Gaussian) { + enum { + kN = 100000, + kBuckets = 100, + kMean = 49, + kStddev = 10 + }; + + Random random(0x12345678); + + int buckets[kBuckets] = {0}; + for (int i = 0; i < kN; ++i) { + int index = random.Gaussian(kMean, kStddev); + if (index >= 0 && index < kBuckets) { + buckets[index]++; + } + } + + const double kPi = 3.14159265358979323846; + const double kScale = kN / (kStddev * std::sqrt(2.0 * kPi)); + const double kDiv = -2.0 * kStddev * kStddev; + double self_corr = 0.0; + double bucket_corr = 0.0; + for (int n = 0; n < kBuckets; ++n) { + double normal_dist = kScale * std::exp((n - kMean) * (n - kMean) / kDiv); + self_corr += normal_dist * normal_dist; + bucket_corr += normal_dist * buckets[n]; + } + printf("Correlation: %f (random sample), %f (self), %f (quotient)\n", + bucket_corr, self_corr, bucket_corr / self_corr); + EXPECT_NEAR(1.0, bucket_corr / self_corr, 0.0004); +} + +static bool IsSequenceNumberSorted(const Packets& packets) { + PacketsConstIt last_it = packets.begin(); + for (PacketsConstIt it = last_it; it != packets.end(); ++it) { + if (IsNewerSequenceNumber(last_it->header().sequenceNumber, + it->header().sequenceNumber)) { + return false; + } + last_it = it; + } + return true; +} + +TEST(BweTestFramework_PacketTest, IsTimeSorted) { + Packets packets; + // Insert some packets in order... + EXPECT_TRUE(IsTimeSorted(packets)); + + packets.push_back(Packet(100, 0)); + EXPECT_TRUE(IsTimeSorted(packets)); + + packets.push_back(Packet(110, 0)); + EXPECT_TRUE(IsTimeSorted(packets)); + + // ...and one out-of-order... + packets.push_back(Packet(100, 0)); + EXPECT_FALSE(IsTimeSorted(packets)); + + // ...remove the out-of-order packet, insert another in-order packet. + packets.pop_back(); + packets.push_back(Packet(120, 0)); + EXPECT_TRUE(IsTimeSorted(packets)); +} + +TEST(BweTestFramework_PacketTest, IsSequenceNumberSorted) { + Packets packets; + // Insert some packets in order... + EXPECT_TRUE(IsSequenceNumberSorted(packets)); + + packets.push_back(Packet(0, 100)); + EXPECT_TRUE(IsSequenceNumberSorted(packets)); + + packets.push_back(Packet(0, 110)); + EXPECT_TRUE(IsSequenceNumberSorted(packets)); + + // ...and one out-of-order...
+ packets.push_back(Packet(0, 100)); + EXPECT_FALSE(IsSequenceNumberSorted(packets)); + + // ...remove the out-of-order packet, insert another in-order packet. + packets.pop_back(); + packets.push_back(Packet(0, 120)); + EXPECT_TRUE(IsSequenceNumberSorted(packets)); +} + +TEST(BweTestFramework_StatsTest, Mean) { + Stats stats; + EXPECT_EQ(0, stats.GetMean()); + + stats.Push(1); + stats.Push(3); + EXPECT_EQ(2, stats.GetMean()); + + // Integer division rounds (1+3-3)/3 to 0. + stats.Push(-3); + EXPECT_EQ(0, stats.GetMean()); +} + +TEST(BweTestFramework_StatsTest, Variance) { + Stats stats; + EXPECT_EQ(0, stats.GetVariance()); + + // Mean is 2 ; ((1-2)*(1-2)+(3-2)*(3-2))/2 = (1+1)/2 = 1 + stats.Push(1); + stats.Push(3); + EXPECT_EQ(1, stats.GetVariance()); + + // Integer division rounds 26/3 to 8 + // Mean is 0 ; (1*1+3*3+(-4)*(-4))/3 = (1+9+16)/3 = 8 + stats.Push(-4); + EXPECT_EQ(8, stats.GetVariance()); +} + +TEST(BweTestFramework_StatsTest, StdDev) { + Stats stats; + EXPECT_EQ(0, stats.GetStdDev()); + + // Variance is 1 ; sqrt(1) = 1 + stats.Push(1); + stats.Push(3); + EXPECT_EQ(1, stats.GetStdDev()); + + // Variance is 8 ; sqrt(8) = 2 with integers. + stats.Push(-4); + EXPECT_EQ(2, stats.GetStdDev()); +} + +TEST(BweTestFramework_StatsTest, MinMax) { + Stats stats; + EXPECT_EQ(0, stats.GetMin()); + EXPECT_EQ(0, stats.GetMax()); + + stats.Push(1); + EXPECT_EQ(1, stats.GetMin()); + EXPECT_EQ(1, stats.GetMax()); + + stats.Push(3); + EXPECT_EQ(1, stats.GetMin()); + EXPECT_EQ(3, stats.GetMax()); + + stats.Push(-4); + EXPECT_EQ(-4, stats.GetMin()); + EXPECT_EQ(3, stats.GetMax()); +} + +class BweTestFramework_RateCounterFilterTest : public ::testing::Test { + public: + BweTestFramework_RateCounterFilterTest() + : filter_(NULL), + now_ms_(0) { + } + virtual ~BweTestFramework_RateCounterFilterTest() {} + + protected: + void TestRateCounter(int64_t run_for_ms, uint32_t payload_bits, + uint32_t expected_pps, uint32_t expected_bps) { + Packets packets; + RTPHeader header = {0}; + // "Send" a packet every 10 ms. + for (int64_t i = 0; i < run_for_ms; i += 10, now_ms_ += 10) { + packets.push_back(Packet(now_ms_ * 1000, payload_bits / 8, header)); + } + filter_.RunFor(run_for_ms, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + EXPECT_EQ(expected_pps, filter_.packets_per_second()); + EXPECT_EQ(expected_bps, filter_.bits_per_second()); + } + + private: + RateCounterFilter filter_; + int64_t now_ms_; + + DISALLOW_COPY_AND_ASSIGN(BweTestFramework_RateCounterFilterTest); +}; + +TEST_F(BweTestFramework_RateCounterFilterTest, Short) { + // 100ms, 100 bytes per packet, should result in 10 pps and 8 kbps. We're + // generating one packet every 10 ms ; 10 * 800 = 8k + TestRateCounter(100, 800, 10, 8000); +} + +TEST_F(BweTestFramework_RateCounterFilterTest, Medium) { + // 100ms, like above. + TestRateCounter(100, 800, 10, 8000); + // 1000ms, 100 bpp, should result in 100 pps and 80 kbps. We're still + // generating packets every 10 ms. + TestRateCounter(900, 800, 100, 80000); +} + +TEST_F(BweTestFramework_RateCounterFilterTest, Long) { + // 100ms, 1000ms, like above. + TestRateCounter(100, 800, 10, 8000); + TestRateCounter(900, 800, 100, 80000); + // 2000ms, should only see rate of last second, so 100 pps, and 40 kbps now. + TestRateCounter(1000, 400, 100, 40000); + // 2500ms, half a second with zero payload size. We should get same pps as + // before, but kbps should drop to half of previous rate. + TestRateCounter(500, 0, 100, 20000); + // Another half second with zero payload size. 
Now the kbps rate should drop + // to zero. + TestRateCounter(500, 0, 100, 0); + // Increase payload size again. 200 * 100 * 0.5 = 10 kbps. + TestRateCounter(500, 200, 100, 10000); +} + +static void TestLossFilter(float loss_percent, bool zero_tolerance) { + LossFilter filter(NULL); + filter.SetLoss(loss_percent); + Packets::size_type sent_packets = 0; + Packets::size_type remaining_packets = 0; + + // No input should yield no output + { + Packets packets; + sent_packets += packets.size(); + filter.RunFor(0, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + remaining_packets += packets.size(); + EXPECT_EQ(0u, sent_packets); + EXPECT_EQ(0u, remaining_packets); + } + + // Generate and process 10000 packets in different batch sizes (some empty) + for (int i = 0; i < 2225; ++i) { + Packets packets; + packets.insert(packets.end(), i % 10, Packet()); + sent_packets += packets.size(); + filter.RunFor(0, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + remaining_packets += packets.size(); + } + + float remaining_fraction = 0.01f * (100.0f - loss_percent); + Packets::size_type expected_packets = remaining_fraction * sent_packets; + if (zero_tolerance) { + EXPECT_EQ(expected_packets, remaining_packets); + } else { + // Require within 1% of expected + EXPECT_NEAR(expected_packets, remaining_packets, 100); + } +} + +TEST(BweTestFramework_LossFilterTest, Loss0) { + // With 0% loss, the result should be exact (no loss). + TestLossFilter(0.0f, true); +} + +TEST(BweTestFramework_LossFilterTest, Loss10) { + TestLossFilter(10.0f, false); +} + +TEST(BweTestFramework_LossFilterTest, Loss50) { + TestLossFilter(50.0f, false); +} + +TEST(BweTestFramework_LossFilterTest, Loss100) { + // With 100% loss, the result should be exact (no packets out).
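+ // (Editor's note: the expected survivor count follows from + // (1 - loss_percent / 100) * sent_packets; at 100% loss that is exactly 0, + // hence zero_tolerance == true.)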
+ TestLossFilter(100.0f, true); +} + +class BweTestFramework_DelayFilterTest : public ::testing::Test { + public: + BweTestFramework_DelayFilterTest() + : filter_(NULL), + now_ms_(0), + sequence_number_(0) { + } + virtual ~BweTestFramework_DelayFilterTest() {} + + protected: + void TestDelayFilter(int64_t run_for_ms, uint32_t in_packets, + uint32_t out_packets) { + Packets packets; + for (uint32_t i = 0; i < in_packets; ++i) { + packets.push_back(Packet(now_ms_ * 1000 + (sequence_number_ >> 4), + sequence_number_)); + sequence_number_++; + } + filter_.RunFor(run_for_ms, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + for (PacketsConstIt it = packets.begin(); it != packets.end(); ++it) { + EXPECT_LE(now_ms_ * 1000, it->send_time_us()); + } + EXPECT_EQ(out_packets, packets.size()); + accumulated_packets_.splice(accumulated_packets_.end(), packets); + now_ms_ += run_for_ms; + } + + void TestDelayFilter(int64_t delay_ms) { + filter_.SetDelay(delay_ms); + TestDelayFilter(1, 0, 0); // No input should yield no output + + // Single packet + TestDelayFilter(0, 1, 1); + TestDelayFilter(delay_ms, 0, 0); + + for (int i = 0; i < delay_ms; ++i) { + filter_.SetDelay(i); + TestDelayFilter(1, 10, 10); + } + TestDelayFilter(0, 0, 0); + TestDelayFilter(delay_ms, 0, 0); + + // Wait a little longer - should still see no output + TestDelayFilter(delay_ms, 0, 0); + + for (int i = 1; i < delay_ms + 1; ++i) { + filter_.SetDelay(i); + TestDelayFilter(1, 5, 5); + } + TestDelayFilter(0, 0, 0); + filter_.SetDelay(2 * delay_ms); + TestDelayFilter(1, 0, 0); + TestDelayFilter(delay_ms, 13, 13); + TestDelayFilter(delay_ms, 0, 0); + + // Wait a little longer - should still see no output + TestDelayFilter(delay_ms, 0, 0); + + for (int i = 0; i < 2 * delay_ms; ++i) { + filter_.SetDelay(2 * delay_ms - i - 1); + TestDelayFilter(1, 5, 5); + } + TestDelayFilter(0, 0, 0); + filter_.SetDelay(0); + TestDelayFilter(0, 7, 7); + + ASSERT_TRUE(IsTimeSorted(accumulated_packets_)); + ASSERT_TRUE(IsSequenceNumberSorted(accumulated_packets_)); + } + + DelayFilter filter_; + Packets accumulated_packets_; + + private: + int64_t now_ms_; + uint32_t sequence_number_; + + DISALLOW_COPY_AND_ASSIGN(BweTestFramework_DelayFilterTest); +}; + +TEST_F(BweTestFramework_DelayFilterTest, Delay0) { + TestDelayFilter(1, 0, 0); // No input should yield no output + TestDelayFilter(1, 10, 10); // Expect no delay (delay time is zero) + TestDelayFilter(1, 0, 0); // Check no packets are still in buffer + filter_.SetDelay(0); + TestDelayFilter(1, 5, 5); // Expect no delay (delay time is zero) + TestDelayFilter(1, 0, 0); // Check no packets are still in buffer +} + +TEST_F(BweTestFramework_DelayFilterTest, Delay1) { + TestDelayFilter(1); +} + +TEST_F(BweTestFramework_DelayFilterTest, Delay2) { + TestDelayFilter(2); +} + +TEST_F(BweTestFramework_DelayFilterTest, Delay20) { + TestDelayFilter(20); +} + +TEST_F(BweTestFramework_DelayFilterTest, Delay100) { + TestDelayFilter(100); +} + +TEST_F(BweTestFramework_DelayFilterTest, JumpToZeroDelay) { + DelayFilter delay(NULL); + Packets acc; + Packets packets; + + // Delay a bunch of packets, accumulate them to the 'acc' list. 
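+ // (Editor's note: Packet(i * 100, i) spaces send times only 100 us apart, + // so the 100 ms delay set below dominates, and RunFor(1000, ...) is long + // enough to flush every delayed packet.)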
+ delay.SetDelay(100); + for (uint32_t i = 0; i < 10; ++i) { + packets.push_back(Packet(i * 100, i)); + } + delay.RunFor(1000, &packets); + acc.splice(acc.end(), packets); + ASSERT_TRUE(IsTimeSorted(acc)); + ASSERT_TRUE(IsSequenceNumberSorted(acc)); + + // Drop delay to zero, send a few more packets through the delay, append them + // to the 'acc' list and verify that it is all sorted. + delay.SetDelay(0); + for (uint32_t i = 10; i < 50; ++i) { + packets.push_back(Packet(i * 100, i)); + } + delay.RunFor(1000, &packets); + acc.splice(acc.end(), packets); + ASSERT_TRUE(IsTimeSorted(acc)); + ASSERT_TRUE(IsSequenceNumberSorted(acc)); +} + +TEST_F(BweTestFramework_DelayFilterTest, IncreasingDelay) { + // Gradually increase delay. + for (int i = 1; i < 50; i += 4) { + TestDelayFilter(i); + } + // Reach a steady state. + filter_.SetDelay(100); + TestDelayFilter(1, 20, 20); + TestDelayFilter(2, 0, 0); + TestDelayFilter(99, 20, 20); + // Drop delay back down to zero. + filter_.SetDelay(0); + TestDelayFilter(1, 100, 100); + TestDelayFilter(23010, 0, 0); + ASSERT_TRUE(IsTimeSorted(accumulated_packets_)); + ASSERT_TRUE(IsSequenceNumberSorted(accumulated_packets_)); +} + +static void TestJitterFilter(int64_t stddev_jitter_ms) { + JitterFilter filter(NULL); + filter.SetJitter(stddev_jitter_ms); + + int64_t now_ms = 0; + uint32_t sequence_number = 0; + + // Generate packets, add jitter to them, accumulate the altered packets. + Packets original; + Packets jittered; + for (uint32_t i = 0; i < 1000; ++i) { + Packets packets; + for (uint32_t j = 0; j < i % 100; ++j) { + packets.push_back(Packet(now_ms * 1000, sequence_number++)); + now_ms += 5 * stddev_jitter_ms; + } + original.insert(original.end(), packets.begin(), packets.end()); + filter.RunFor(stddev_jitter_ms, &packets); + jittered.splice(jittered.end(), packets); + } + + // Jittered packets should still be in order. + ASSERT_TRUE(IsTimeSorted(original)); + ASSERT_TRUE(IsTimeSorted(jittered)); + ASSERT_TRUE(IsSequenceNumberSorted(original)); + ASSERT_TRUE(IsSequenceNumberSorted(jittered)); + EXPECT_EQ(original.size(), jittered.size()); + + // Make sure jittered and original packets are in same order. Collect time + // difference (jitter) in stats, then check that mean jitter is close to zero + // and standard deviation of jitter is what we set it to. + Stats<double> jitter_us; + for (PacketsIt it1 = original.begin(), it2 = jittered.begin(); + it1 != original.end() && it2 != jittered.end(); ++it1, ++it2) { + EXPECT_EQ(it1->header().sequenceNumber, it2->header().sequenceNumber); + jitter_us.Push(it2->send_time_us() - it1->send_time_us()); + } + EXPECT_NEAR(0.0, jitter_us.GetMean(), stddev_jitter_ms * 1000.0 * 0.008); + EXPECT_NEAR(stddev_jitter_ms * 1000.0, jitter_us.GetStdDev(), + stddev_jitter_ms * 1000.0 * 0.02); +} + +TEST(BweTestFramework_JitterFilterTest, Jitter0) { + TestJitterFilter(0); +} + +TEST(BweTestFramework_JitterFilterTest, Jitter1) { + TestJitterFilter(1); +} + +TEST(BweTestFramework_JitterFilterTest, Jitter5) { + TestJitterFilter(5); +} + +TEST(BweTestFramework_JitterFilterTest, Jitter10) { + TestJitterFilter(10); +} + +TEST(BweTestFramework_JitterFilterTest, Jitter1031) { + TestJitterFilter(1031); +} + +static void TestReorderFilter(uint32_t reorder_percent, uint32_t near) { + const uint32_t kPacketCount = 10000; + + // Generate packets with 10 ms interval.
+ Packets packets; + int64_t now_ms = 0; + uint32_t sequence_number = 1; + for (uint32_t i = 0; i < kPacketCount; ++i, now_ms += 10) { + packets.push_back(Packet(now_ms * 1000, sequence_number++)); + } + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + + // Reorder packets, verify that send times are still in order. + ReorderFilter filter(NULL); + filter.SetReorder(reorder_percent); + filter.RunFor(now_ms, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + + // We measure the amount of reordering by summing the distance by which out- + // of-order packets have been moved in the stream. + uint32_t distance = 0; + uint32_t last_sequence_number = 0; + for (PacketsIt it = packets.begin(); it != packets.end(); ++it) { + uint32_t sequence_number = it->header().sequenceNumber; + if (sequence_number < last_sequence_number) { + distance += last_sequence_number - sequence_number; + } + last_sequence_number = sequence_number; + } + + // Because reordering is random, we allow a threshold when comparing. The + // maximum distance a packet can be moved is PacketCount - 1. + EXPECT_NEAR(((kPacketCount - 1) * reorder_percent) / 100, distance, near); +} + +TEST(BweTestFramework_ReorderFilterTest, Reorder0) { + // For 0% reordering, no packets should have been moved, so result is exact. + TestReorderFilter(0, 0); +} + +TEST(BweTestFramework_ReorderFilterTest, Reorder10) { + TestReorderFilter(10, 30); +} + +TEST(BweTestFramework_ReorderFilterTest, Reorder20) { + TestReorderFilter(20, 20); +} + +TEST(BweTestFramework_ReorderFilterTest, Reorder50) { + TestReorderFilter(50, 20); +} + +TEST(BweTestFramework_ReorderFilterTest, Reorder70) { + TestReorderFilter(70, 20); +} + +TEST(BweTestFramework_ReorderFilterTest, Reorder100) { + // Note that because the implementation works by optionally swapping two + // adjacent packets, when the likelihood of a swap is 1.0, a swap will always + // occur, so the stream will be in order except for the first packet, which + // has been moved to the end. Therefore we expect the result to be exact here. + TestReorderFilter(100.0, 0); +} + +class BweTestFramework_ChokeFilterTest : public ::testing::Test { + public: + BweTestFramework_ChokeFilterTest() + : now_ms_(0), + sequence_number_(0), + output_packets_(), + send_times_us_() { + } + virtual ~BweTestFramework_ChokeFilterTest() {} + + protected: + void TestChoke(PacketProcessor* filter, + int64_t run_for_ms, + uint32_t packets_to_generate, + uint32_t expected_kbit_transmitted) { + // Generate a bunch of packets, apply choke, verify output is ordered. + Packets packets; + RTPHeader header = {0}; + for (uint32_t i = 0; i < packets_to_generate; ++i) { + int64_t send_time_ms = now_ms_ + (i * run_for_ms) / packets_to_generate; + header.sequenceNumber = sequence_number_++; + // Payload is 1000 bits. + packets.push_back(Packet(send_time_ms * 1000, 125, header)); + send_times_us_.push_back(send_time_ms * 1000); + } + ASSERT_TRUE(IsTimeSorted(packets)); + filter->RunFor(run_for_ms, &packets); + now_ms_ += run_for_ms; + output_packets_.splice(output_packets_.end(), packets); + ASSERT_TRUE(IsTimeSorted(output_packets_)); + ASSERT_TRUE(IsSequenceNumberSorted(output_packets_)); + + // Sum up the transmitted bytes up until the current time. 
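+ // (Editor's note: with 125-byte payloads each packet carries exactly + // 1 kbit, so |expected_kbit_transmitted| doubles as an expected packet + // count.)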
+ uint32_t bytes_transmitted = 0; + while (!output_packets_.empty()) { + const Packet& packet = output_packets_.front(); + if (packet.send_time_us() > now_ms_ * 1000) { + break; + } + bytes_transmitted += packet.payload_size(); + output_packets_.pop_front(); + } + EXPECT_EQ(expected_kbit_transmitted, (bytes_transmitted * 8) / 1000); + } + + void CheckMaxDelay(int64_t max_delay_ms) { + for (PacketsIt it = output_packets_.begin(); it != output_packets_.end(); + ++it) { + const Packet& packet = *it; + int64_t delay_us = packet.send_time_us() - + send_times_us_[packet.header().sequenceNumber]; + EXPECT_GE(max_delay_ms * 1000, delay_us); + } + } + + private: + int64_t now_ms_; + uint32_t sequence_number_; + Packets output_packets_; + std::vector<int64_t> send_times_us_; + + DISALLOW_COPY_AND_ASSIGN(BweTestFramework_ChokeFilterTest); +}; + +TEST_F(BweTestFramework_ChokeFilterTest, Short) { + // 100ms, 100 packets, 10 kbps choke -> 1 kbit of data should have propagated. + // That is actually just a single packet, since each packet has 1000 bits of + // payload. + ChokeFilter filter(NULL); + filter.SetCapacity(10); + TestChoke(&filter, 100, 100, 1); +} + +TEST_F(BweTestFramework_ChokeFilterTest, Medium) { + // 100ms, 10 packets, 10 kbps choke -> 1 packet through, or 1 kbit. + ChokeFilter filter(NULL); + filter.SetCapacity(10); + TestChoke(&filter, 100, 10, 1); + // 200ms, no new packets -> another packet through. + TestChoke(&filter, 100, 0, 1); + // 1000ms, no new packets -> 8 more packets. + TestChoke(&filter, 800, 0, 8); + // 2000ms, no new packets -> queue is empty so no output. + TestChoke(&filter, 1000, 0, 0); +} + +TEST_F(BweTestFramework_ChokeFilterTest, Long) { + // 100ms, 100 packets in queue, 10 kbps choke -> 1 packet through, or 1 kbit. + ChokeFilter filter(NULL); + filter.SetCapacity(10); + TestChoke(&filter, 100, 100, 1); + // 200ms, no input, another packet through. + TestChoke(&filter, 100, 0, 1); + // 1000ms, no input, 8 packets through. + TestChoke(&filter, 800, 0, 8); + // 10000ms, no input, raise choke to 100 kbps. Remaining 90 packets in queue + // should be propagated, for a total of 90 kbit. + filter.SetCapacity(100); + TestChoke(&filter, 9000, 0, 90); + // 10100ms, 20 more packets -> 10 packets or 10 kbit through. + TestChoke(&filter, 100, 20, 10); + // 10300ms, 10 more packets -> 20 packets out. + TestChoke(&filter, 200, 10, 20); + // 11300ms, no input, queue should be empty. + filter.SetCapacity(10); + TestChoke(&filter, 1000, 0, 0); +} + +TEST_F(BweTestFramework_ChokeFilterTest, MaxDelay) { + // 10 kbps choke, 500 ms delay cap + ChokeFilter filter(NULL); + filter.SetCapacity(10); + filter.SetMaxDelay(500); + // 100ms, 100 packets in queue, 10 kbps choke -> 1 packet through, or 1 kbit. + TestChoke(&filter, 100, 100, 1); + CheckMaxDelay(500); + // 500ms, no input, 4 more packets through. + TestChoke(&filter, 400, 0, 4); + // 10000ms, no input, remaining packets should have been dropped. + TestChoke(&filter, 9500, 0, 0); + + // 100 ms delay cap + filter.SetMaxDelay(100); + // 10100ms, 50 more packets -> 2 packets or 2 kbit through. + TestChoke(&filter, 100, 50, 2); + CheckMaxDelay(100); + // 20000ms, no input, remaining packets in queue should have been dropped. + TestChoke(&filter, 9900, 0, 0); + + // Reset delay cap (0 is no cap) and verify no packets are dropped.
+ filter.SetCapacity(10); + filter.SetMaxDelay(0); + TestChoke(&filter, 100, 100, 2); + TestChoke(&filter, 9900, 0, 98); +} + +TEST_F(BweTestFramework_ChokeFilterTest, ShortTrace) { + // According to the input file 6 packets should be transmitted within + // 100 milliseconds. + TraceBasedDeliveryFilter filter(NULL); + ASSERT_TRUE(filter.Init(test::ResourcePath("synthetic-trace", "rx"))); + TestChoke(&filter, 100, 100, 6); +} + +TEST_F(BweTestFramework_ChokeFilterTest, ShortTraceWrap) { + // According to the input file 10 packets should be transmitted within + // 140 milliseconds (at the wrapping point two packets are sent back to back). + TraceBasedDeliveryFilter filter(NULL); + ASSERT_TRUE(filter.Init(test::ResourcePath("synthetic-trace", "rx"))); + TestChoke(&filter, 140, 100, 10); +} + +void TestVideoSender(VideoSender* sender, int64_t run_for_ms, + uint32_t expected_packets, + uint32_t expected_payload_size, + uint32_t expected_total_payload_size) { + assert(sender); + Packets packets; + sender->RunFor(run_for_ms, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + EXPECT_EQ(expected_packets, packets.size()); + int64_t send_time_us = -1; + uint32_t total_payload_size = 0; + uint32_t absolute_send_time = 0; + uint32_t absolute_send_time_wraps = 0; + uint32_t rtp_timestamp = 0; + uint32_t rtp_timestamp_wraps = 0; + for (PacketsIt it = packets.begin(); it != packets.end(); ++it) { + EXPECT_LE(send_time_us, it->send_time_us()); + send_time_us = it->send_time_us(); + if (sender->max_payload_size_bytes() != it->payload_size()) { + EXPECT_EQ(expected_payload_size, it->payload_size()); + } + total_payload_size += it->payload_size(); + if (absolute_send_time > it->header().extension.absoluteSendTime) { + absolute_send_time_wraps++; + } + absolute_send_time = it->header().extension.absoluteSendTime; + if (rtp_timestamp > it->header().timestamp) { + rtp_timestamp_wraps++; + } + rtp_timestamp = it->header().timestamp; + } + EXPECT_EQ(expected_total_payload_size, total_payload_size); + EXPECT_GE(1u, absolute_send_time_wraps); + EXPECT_GE(1u, rtp_timestamp_wraps); +} + +TEST(BweTestFramework_VideoSenderTest, Fps1Kpbs80_1s) { + // 1 fps, 80 kbps + VideoSender sender(NULL, 1.0f, 80, 0x1234, 0); + EXPECT_EQ(10000u, sender.bytes_per_second()); + // We're at 1 fps, so all packets should be generated on first call, giving 10 + // packets of each 1000 bytes, total 10000 bytes. + TestVideoSender(&sender, 1, 10, 1000, 10000); + // 999ms, should see no output here. + TestVideoSender(&sender, 998, 0, 0, 0); + // 1999ms, should get data for one more frame. + TestVideoSender(&sender, 1000, 10, 1000, 10000); + // 2000ms, one more frame. + TestVideoSender(&sender, 1, 10, 1000, 10000); + // 2999ms, should see nothing. + TestVideoSender(&sender, 999, 0, 0, 0); +} + +TEST(BweTestFramework_VideoSenderTest, Fps1Kpbs80_1s_Offset) { + // 1 fps, 80 kbps, offset 0.5 of a frame period, ==0.5s in this case. + VideoSender sender(NULL, 1.0f, 80, 0x1234, 0.5f); + EXPECT_EQ(10000u, sender.bytes_per_second()); + // 499ms, no output. + TestVideoSender(&sender, 499, 0, 0, 0); + // 500ms, first frame (this is the offset we set), 10 packets of 1000 bytes. + TestVideoSender(&sender, 1, 10, 1000, 10000); + // 1499ms, nothing. + TestVideoSender(&sender, 999, 0, 0, 0); + // 1999ms, second frame. + TestVideoSender(&sender, 500, 10, 1000, 10000); + // 2499ms, nothing. + TestVideoSender(&sender, 500, 0, 0, 0); + // 2500ms, third frame. 
+ TestVideoSender(&sender, 1, 10, 1000, 10000); + // 3499ms, nothing. + TestVideoSender(&sender, 999, 0, 0, 0); +} + +TEST(BweTestFramework_VideoSenderTest, Fps50Kpbs80_11s) { + // 50 fps, 80 kbps. + VideoSender sender(NULL, 50.0f, 80, 0x1234, 0); + EXPECT_EQ(10000u, sender.bytes_per_second()); + // 9998ms, should see 500 frames, 200 byte payloads, total 100000 bytes. + TestVideoSender(&sender, 9998, 500, 200, 100000); + // 9999ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); + // 10000ms, 501st frame as a single packet. + TestVideoSender(&sender, 1, 1, 200, 200); + // 10998ms, 49 more frames. + TestVideoSender(&sender, 998, 49, 200, 9800); + // 10999ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); +} + +TEST(BweTestFramework_VideoSenderTest, Fps10Kpbs120_1s) { + // 20 fps, 120 kbps. + VideoSender sender(NULL, 20.0f, 120, 0x1234, 0); + EXPECT_EQ(15000u, sender.bytes_per_second()); + // 498ms, 10 frames with 750 byte payloads, total 7500 bytes. + TestVideoSender(&sender, 498, 10, 750, 7500); + // 499ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); + // 500ms, one more frame. + TestVideoSender(&sender, 1, 1, 750, 750); + // 998ms, 9 more frames. + TestVideoSender(&sender, 498, 9, 750, 6750); + // 999ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); +} + +TEST(BweTestFramework_VideoSenderTest, Fps30Kpbs800_20s) { + // 25 fps, 820 kbps. + VideoSender sender(NULL, 25.0f, 820, 0x1234, 0); + EXPECT_EQ(102500u, sender.bytes_per_second()); + // 9998ms, 250 frames. 820 kbps = 102500 bytes/s, so total should be 1025000. + // Each frame is 102500/25=4100 bytes, or 5 packets (4 @1000 bytes, 1 @100), + // so packet count should be 5*250=1250 and last packet of each frame has + // 100 bytes of payload. + TestVideoSender(&sender, 9998, 1250, 100, 1025000); + // 9999ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); + // 19998ms, 250 more frames. + TestVideoSender(&sender, 9999, 1250, 100, 1025000); + // 19999ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); + // 20038ms, one more frame, as described above (25fps == 40ms/frame). + TestVideoSender(&sender, 39, 5, 100, 4100); + // 20039ms, nothing. + TestVideoSender(&sender, 1, 0, 0, 0); +} + +TEST(BweTestFramework_VideoSenderTest, TestAppendInOrder) { + // 1 fps, 80 kbps, 250ms offset. + VideoSender sender1(NULL, 1.0f, 80, 0x1234, 0.25f); + EXPECT_EQ(10000u, sender1.bytes_per_second()); + Packets packets; + // Generate some packets, verify they are sorted. + sender1.RunFor(999, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + EXPECT_EQ(10u, packets.size()); + // Generate some more packets and verify they are appended to end of list. + sender1.RunFor(1000, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + ASSERT_TRUE(IsSequenceNumberSorted(packets)); + EXPECT_EQ(20u, packets.size()); + + // Another sender, 2 fps, 160 kbps, 150ms offset + VideoSender sender2(NULL, 2.0f, 160, 0x2234, 0.30f); + EXPECT_EQ(20000u, sender2.bytes_per_second()); + // Generate some packets, verify that they are merged with the packets already + // on the list. + sender2.RunFor(999, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + EXPECT_EQ(40u, packets.size()); + // Generate some more.
+ sender2.RunFor(1000, &packets); + ASSERT_TRUE(IsTimeSorted(packets)); + EXPECT_EQ(60u, packets.size()); +} + +TEST(BweTestFramework_VideoSenderTest, FeedbackIneffective) { + VideoSender sender(NULL, 25.0f, 820, 0x1234, 0); + EXPECT_EQ(102500u, sender.bytes_per_second()); + TestVideoSender(&sender, 9998, 1250, 100, 1025000); + + // Make sure feedback has no effect on a regular video sender. + PacketSender::Feedback feedback = { 512000 }; + sender.GiveFeedback(feedback); + EXPECT_EQ(102500u, sender.bytes_per_second()); + TestVideoSender(&sender, 9998, 1250, 100, 1025000); +} + +TEST(BweTestFramework_AdaptiveVideoSenderTest, FeedbackChangesBitrate) { + AdaptiveVideoSender sender(NULL, 25.0f, 820, 0x1234, 0); + EXPECT_EQ(102500u, sender.bytes_per_second()); + TestVideoSender(&sender, 9998, 1250, 100, 1025000); + + // Make sure we can reduce the bitrate. + PacketSender::Feedback feedback = { 512000 }; + sender.GiveFeedback(feedback); + EXPECT_EQ(64000u, sender.bytes_per_second()); + TestVideoSender(&sender, 9998, 750, 560, 640000); + + // Increase the bitrate to the initial bitrate and verify that the output is + // the same. + feedback.estimated_bps = 820000; + sender.GiveFeedback(feedback); + EXPECT_EQ(102500u, sender.bytes_per_second()); + TestVideoSender(&sender, 9998, 1250, 100, 1025000); +} +} // namespace bwe +} // namespace testing +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc new file mode 100644 index 000000000000..1a43f09c215a --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc @@ -0,0 +1,163 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h" + +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE + +#include <assert.h> +#include <stdarg.h> +#include <stdio.h> + +#include <algorithm> + +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/system_wrappers/interface/thread_wrapper.h" + +namespace webrtc { +namespace testing { +namespace bwe { + +Logging Logging::g_Logging; + +static std::string ToString(uint32_t v) { + const size_t kBufferSize = 16; + char string_buffer[kBufferSize] = {0}; +#if defined(_MSC_VER) && defined(_WIN32) + _snprintf(string_buffer, kBufferSize - 1, "%08x", v); +#else + snprintf(string_buffer, kBufferSize, "%08x", v); +#endif + return string_buffer; +} + +Logging::Context::Context(uint32_t name, int64_t timestamp_ms, bool enabled) { + Logging::GetInstance()->PushState(ToString(name), timestamp_ms, enabled); +} + +Logging::Context::Context(const std::string& name, int64_t timestamp_ms, + bool enabled) { + Logging::GetInstance()->PushState(name, timestamp_ms, enabled); +} + +Logging::Context::Context(const char* name, int64_t timestamp_ms, + bool enabled) { + Logging::GetInstance()->PushState(name, timestamp_ms, enabled); +} + +Logging::Context::~Context() { + Logging::GetInstance()->PopState(); +} + +Logging* Logging::GetInstance() { + return &g_Logging; +} + +void Logging::SetGlobalContext(uint32_t name) { + CriticalSectionScoped cs(crit_sect_.get()); + thread_map_[ThreadWrapper::GetThreadId()].global_state.tag = ToString(name); +} + +void Logging::SetGlobalContext(const std::string& name) { + CriticalSectionScoped cs(crit_sect_.get()); + thread_map_[ThreadWrapper::GetThreadId()].global_state.tag = name; +} + +void Logging::SetGlobalContext(const char* name) { + CriticalSectionScoped cs(crit_sect_.get()); + thread_map_[ThreadWrapper::GetThreadId()].global_state.tag = name; +} + +void Logging::SetGlobalEnable(bool enabled) { + CriticalSectionScoped cs(crit_sect_.get()); + thread_map_[ThreadWrapper::GetThreadId()].global_state.enabled = enabled; +} + +void Logging::Log(const char format[], ...)
{ + CriticalSectionScoped cs(crit_sect_.get()); + ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId()); + assert(it != thread_map_.end()); + const State& state = it->second.stack.top(); + if (state.enabled) { + printf("%s\t", state.tag.c_str()); + va_list args; + va_start(args, format); + vprintf(format, args); + va_end(args); + printf("\n"); + } +} + +void Logging::Plot(double value) { + CriticalSectionScoped cs(crit_sect_.get()); + ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId()); + assert(it != thread_map_.end()); + const State& state = it->second.stack.top(); + if (state.enabled) { + printf("PLOT\t%s\t%f\t%f\n", state.tag.c_str(), state.timestamp_ms * 0.001, + value); + } +} + +Logging::Logging() + : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()), + thread_map_() { +} + +Logging::State::State() : tag(""), timestamp_ms(0), enabled(true) {} + +Logging::State::State(const std::string& tag, int64_t timestamp_ms, + bool enabled) + : tag(tag), + timestamp_ms(timestamp_ms), + enabled(enabled) { +} + +void Logging::State::MergePrevious(const State& previous) { + if (tag == "") { + tag = previous.tag; + } else if (previous.tag != "") { + tag = previous.tag + "_" + tag; + } + timestamp_ms = std::max(previous.timestamp_ms, timestamp_ms); + enabled = previous.enabled && enabled; +} + +void Logging::PushState(const std::string& append_to_tag, int64_t timestamp_ms, + bool enabled) { + CriticalSectionScoped cs(crit_sect_.get()); + State new_state(append_to_tag, timestamp_ms, enabled); + ThreadState* thread_state = &thread_map_[ThreadWrapper::GetThreadId()]; + std::stack<State>* stack = &thread_state->stack; + if (stack->empty()) { + new_state.MergePrevious(thread_state->global_state); + } else { + new_state.MergePrevious(stack->top()); + } + stack->push(new_state); +} + +void Logging::PopState() { + CriticalSectionScoped cs(crit_sect_.get()); + ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId()); + assert(it != thread_map_.end()); + std::stack<State>* stack = &it->second.stack; + int64_t newest_timestamp_ms = stack->top().timestamp_ms; + stack->pop(); + if (!stack->empty()) { + State* state = &stack->top(); + // Update time so that next log/plot will use the latest time seen so far + // in this call tree. + state->timestamp_ms = std::max(state->timestamp_ms, newest_timestamp_ms); + } +} +} // namespace bwe +} // namespace testing +} // namespace webrtc + +#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h new file mode 100644 index 000000000000..c9497635555f --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h @@ -0,0 +1,225 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_LOGGING_H_ +#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_LOGGING_H_ + +// To enable BWE logging, run this command from trunk/: +// build/gyp_chromium --depth=.
webrtc/modules/modules.gyp +// -Denable_bwe_test_logging=1 +#ifndef BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +#define BWE_TEST_LOGGING_COMPILE_TIME_ENABLE 0 +#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE + +// BWE logging allows you to insert dynamically named log/plot points in the +// call tree. E.g. the function: +// void f1() { +// BWE_TEST_LOGGING_TIME(clock_->TimeInMilliseconds()); +// BWE_TEST_LOGGING_CONTEXT("stream"); +// for (uint32_t i=0; i<4; ++i) { +// BWE_TEST_LOGGING_ENABLE(i & 1); +// BWE_TEST_LOGGING_CONTEXT(i); +// BWE_TEST_LOGGING_LOG1("weight", "%f tonnes", weights_[i]); +// for (float j=0.0f; j<1.0; j+=0.4f) { +// BWE_TEST_LOGGING_PLOT("bps", -1, j); +// } +// } +// } +// +// Might produce the output: +// stream_00000001_weight 13.000000 tonnes +// PLOT stream_00000001_bps 1.000000 0.000000 +// PLOT stream_00000001_bps 1.000000 0.400000 +// PLOT stream_00000001_bps 1.000000 0.800000 +// stream_00000003_weight 39.000000 tonnes +// PLOT stream_00000003_bps 1.000000 0.000000 +// PLOT stream_00000003_bps 1.000000 0.400000 +// PLOT stream_00000003_bps 1.000000 0.800000 +// +// Log *contexts* are names concatenated with '_' between them, with the name +// of the logged/plotted string/value last. Plot *time* is inherited down the +// tree. A branch is enabled by default but can be *disabled* to reduce output. +// The difference between the LOG and PLOT macros is that PLOT prefixes the line +// so it can be easily filtered, plus it outputs the current time. + +#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) + +// Set a thread-global base logging context. This name will be prepended to all +// hierarchical contexts. +// |name| is a char*, std::string or uint32_t to name the context. +#define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) + +// Thread-globally allow/disallow logging. +// |enabled| is expected to be a bool. +#define BWE_TEST_LOGGING_GLOBAL_ENABLE(enabled) + +// Insert a (hierarchical) logging context. +// |name| is a char*, std::string or uint32_t to name the context. +#define BWE_TEST_LOGGING_CONTEXT(name) + +// Allow/disallow logging down the call tree from this point. Logging must be +// enabled all the way to the root of the call tree to take place. +// |enabled| is expected to be a bool. +#define BWE_TEST_LOGGING_ENABLE(enabled) + +// Set current time (only affects PLOT output). Down the call tree, the latest +// time set always takes precedence. +// |time| is an int64_t time in ms, or -1 to inherit time from previous context. +#define BWE_TEST_LOGGING_TIME(time) + +// Print to stdout, e.g.: +// Context1_Context2_Name printf-formatted string +// |name| is a char*, std::string or uint32_t to name the log line. +// |format| is a printf format string. +// |_1...| are arguments for printf. +#define BWE_TEST_LOGGING_LOG1(name, format, _1) +#define BWE_TEST_LOGGING_LOG2(name, format, _1, _2) +#define BWE_TEST_LOGGING_LOG3(name, format, _1, _2, _3) +#define BWE_TEST_LOGGING_LOG4(name, format, _1, _2, _3, _4) +#define BWE_TEST_LOGGING_LOG5(name, format, _1, _2, _3, _4, _5) + +// Print to stdout in tab-separated format suitable for plotting, e.g.: +// PLOT Context1_Context2_Name time value +// |name| is a char*, std::string or uint32_t to name the plotted value. +// |time| is an int64_t time in ms, or -1 to inherit time from previous context. +// |value| is a double precision float to be plotted.
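+// (Editor's note: since enabled PLOT lines are tab-separated and share the +// "PLOT" prefix, they are easy to post-process, e.g. with a shell pipeline +// such as: grep '^PLOT' log.txt | cut -f2- +// which yields name/time/value columns ready for plotting.)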
+#define BWE_TEST_LOGGING_PLOT(name, time, value) + +#else // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE + +#include <map> +#include <stack> +#include <string> + +#include "webrtc/common_types.h" +#include "webrtc/system_wrappers/interface/constructor_magic.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +#define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \ + do { \ + webrtc::testing::bwe::Logging::GetInstance()->SetGlobalContext(name); \ + } while (0); + +#define BWE_TEST_LOGGING_GLOBAL_ENABLE(enabled) \ + do { \ + webrtc::testing::bwe::Logging::GetInstance()->SetGlobalEnable(enabled); \ + } while (0); + +#define __BWE_TEST_LOGGING_CONTEXT_NAME(ctx, line) ctx ## line +#define __BWE_TEST_LOGGING_CONTEXT_DECLARE(ctx, line, name, time, enabled) \ + webrtc::testing::bwe::Logging::Context \ + __BWE_TEST_LOGGING_CONTEXT_NAME(ctx, line)(name, time, enabled) + +#define BWE_TEST_LOGGING_CONTEXT(name) \ + __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, name, -1, true) +#define BWE_TEST_LOGGING_ENABLE(enabled) \ + __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, "", -1, \ + static_cast<bool>(enabled)) +#define BWE_TEST_LOGGING_TIME(time) \ + __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, "", \ + static_cast<int64_t>(time), true) + +#define BWE_TEST_LOGGING_LOG1(name, format, _1) \ + do { \ + BWE_TEST_LOGGING_CONTEXT(name); \ + webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1); \ + } while (0); +#define BWE_TEST_LOGGING_LOG2(name, format, _1, _2) \ + do { \ + BWE_TEST_LOGGING_CONTEXT(name); \ + webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2); \ + } while (0); +#define BWE_TEST_LOGGING_LOG3(name, format, _1, _2, _3) \ + do { \ + BWE_TEST_LOGGING_CONTEXT(name); \ + webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3); \ + } while (0); +#define BWE_TEST_LOGGING_LOG4(name, format, _1, _2, _3, _4) \ + do { \ + BWE_TEST_LOGGING_CONTEXT(name); \ + webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3, \ + _4); \ + } while (0); +#define BWE_TEST_LOGGING_LOG5(name, format, _1, _2, _3, _4, _5) \ + do {\ + BWE_TEST_LOGGING_CONTEXT(name); \ + webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3, \ + _4, _5); \ + } while (0); + +#define BWE_TEST_LOGGING_PLOT(name, time, value)\ + do { \ + __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, name, \ + static_cast<int64_t>(time), true); \ + webrtc::testing::bwe::Logging::GetInstance()->Plot(value); \ + } while (0); + +namespace webrtc { + +class CriticalSectionWrapper; + +namespace testing { +namespace bwe { + +class Logging { + public: + class Context { + public: + Context(uint32_t name, int64_t timestamp_ms, bool enabled); + Context(const std::string& name, int64_t timestamp_ms, bool enabled); + Context(const char* name, int64_t timestamp_ms, bool enabled); + ~Context(); + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(Context); + }; + + static Logging* GetInstance(); + + void SetGlobalContext(uint32_t name); + void SetGlobalContext(const std::string& name); + void SetGlobalContext(const char* name); + void SetGlobalEnable(bool enabled); + + void Log(const char format[], ...); + void Plot(double value); + + private: + struct State { + State(); + State(const std::string& new_tag, int64_t timestamp_ms, bool enabled); + void MergePrevious(const State& previous); + + std::string tag; + int64_t timestamp_ms; + bool enabled; + }; + struct ThreadState { + State global_state; + std::stack<State> stack; + }; + typedef std::map<uint32_t, ThreadState> ThreadMap; + + Logging(); + void PushState(const std::string& append_to_tag, int64_t
timestamp_ms, + bool enabled); + void PopState(); + + static Logging g_Logging; + scoped_ptr<CriticalSectionWrapper> crit_sect_; + ThreadMap thread_map_; + + DISALLOW_COPY_AND_ASSIGN(Logging); +}; +} // namespace bwe +} // namespace testing +} // namespace webrtc + +#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_LOGGING_H_ diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc new file mode 100644 index 000000000000..688d9d46c57e --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h" + +#include <assert.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h" +#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h" +#include "webrtc/modules/video_coding/main/test/rtp_player.h" + +using webrtc::rtpplayer::RtpPacketSourceInterface; + +const int kMinBitrateBps = 30000; + +bool ParseArgsAndSetupEstimator(int argc, + char** argv, + webrtc::Clock* clock, + webrtc::RemoteBitrateObserver* observer, + RtpPacketSourceInterface** rtp_reader, + webrtc::RtpHeaderParser** parser, + webrtc::RemoteBitrateEstimator** estimator, + std::string* estimator_used) { + *rtp_reader = webrtc::rtpplayer::CreateRtpFileReader(argv[3]); + if (!*rtp_reader) { + printf("Cannot open input file %s\n", argv[3]); + return false; + } + printf("Input file: %s\n\n", argv[3]); + webrtc::RTPExtensionType extension = webrtc::kRtpExtensionAbsoluteSendTime; + + if (strncmp("tsoffset", argv[1], 8) == 0) { + extension = webrtc::kRtpExtensionTransmissionTimeOffset; + printf("Extension: tsoffset\n"); + } else { + printf("Extension: abs\n"); + } + int id = atoi(argv[2]); + + // Setup the RTP header parser and the bitrate estimator. + *parser = webrtc::RtpHeaderParser::Create(); + (*parser)->RegisterRtpHeaderExtension(extension, id); + if (estimator) { + switch (extension) { + case webrtc::kRtpExtensionAbsoluteSendTime: { + webrtc::AbsoluteSendTimeRemoteBitrateEstimatorFactory factory; + *estimator = factory.Create(observer, clock, kMinBitrateBps); + *estimator_used = "AbsoluteSendTimeRemoteBitrateEstimator"; + break; + } + case webrtc::kRtpExtensionTransmissionTimeOffset: { + webrtc::RemoteBitrateEstimatorFactory factory; + *estimator = factory.Create(observer, clock, kMinBitrateBps); + *estimator_used = "RemoteBitrateEstimator"; + break; + } + default: + assert(false); + } + } + return true; +} diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h new file mode 100644 index 000000000000..714457d5668e --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2014 The WebRTC project authors.
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_ +#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_ + +#include <string> + +namespace webrtc { +class Clock; +class RemoteBitrateEstimator; +class RemoteBitrateObserver; +class RtpHeaderParser; +namespace rtpplayer { +class RtpPacketSourceInterface; +} +} + +bool ParseArgsAndSetupEstimator( + int argc, + char** argv, + webrtc::Clock* clock, + webrtc::RemoteBitrateObserver* observer, + webrtc::rtpplayer::RtpPacketSourceInterface** rtp_reader, + webrtc::RtpHeaderParser** parser, + webrtc::RemoteBitrateEstimator** estimator, + std::string* estimator_used); + +#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_ diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc new file mode 100644 index 000000000000..9ea3f08eab55 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <stdio.h> + +#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h" +#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h" +#include "webrtc/modules/video_coding/main/test/rtp_player.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +using webrtc::rtpplayer::RtpPacketSourceInterface; + +class Observer : public webrtc::RemoteBitrateObserver { + public: + explicit Observer(webrtc::Clock* clock) : clock_(clock) {} + + // Called when a receive channel group has a new bitrate estimate for the + // incoming streams.
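+ // (Editor's note: |ssrcs| lists the streams contributing to the estimate + // and |bitrate| is the aggregate estimate in bits per second -- inferred + // from how the values are printed below.)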
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs, + unsigned int bitrate) { + printf("[%u] Num SSRCs: %d, bitrate: %u\n", + static_cast<uint32_t>(clock_->TimeInMilliseconds()), + static_cast<int>(ssrcs.size()), bitrate); + } + + virtual ~Observer() {} + + private: + webrtc::Clock* clock_; +}; + +int main(int argc, char** argv) { + if (argc < 4) { + printf("Usage: bwe_rtp_play <extension type> <extension id> " + "<input_file.rtp>\n"); + printf("<extension type> can either be:\n" + "  abs for absolute send time or\n" + "  tsoffset for timestamp offset.\n" + "<extension id> is the id associated with the extension.\n"); + return -1; + } + RtpPacketSourceInterface* reader; + webrtc::RemoteBitrateEstimator* estimator; + webrtc::RtpHeaderParser* parser; + std::string estimator_used; + webrtc::SimulatedClock clock(0); + Observer observer(&clock); + if (!ParseArgsAndSetupEstimator(argc, argv, &clock, &observer, &reader, + &parser, &estimator, &estimator_used)) { + return -1; + } + webrtc::scoped_ptr<RtpPacketSourceInterface> rtp_reader(reader); + webrtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(parser); + webrtc::scoped_ptr<webrtc::RemoteBitrateEstimator> rbe(estimator); + + // Process the file. + int packet_counter = 0; + int64_t next_process_time_ms = 0; + int64_t next_rtp_time_ms = 0; + int64_t first_rtp_time_ms = -1; + const uint32_t kMaxPacketSize = 1500; + uint8_t packet_buffer[kMaxPacketSize]; + uint8_t* packet = packet_buffer; + int non_zero_abs_send_time = 0; + int non_zero_ts_offsets = 0; + while (true) { + uint32_t next_rtp_time; + if (next_rtp_time_ms <= clock.TimeInMilliseconds()) { + uint32_t packet_length = kMaxPacketSize; + if (rtp_reader->NextPacket(packet, &packet_length, + &next_rtp_time) == -1) { + break; + } + if (first_rtp_time_ms == -1) + first_rtp_time_ms = next_rtp_time; + next_rtp_time_ms = next_rtp_time - first_rtp_time_ms; + webrtc::RTPHeader header; + parser->Parse(packet, packet_length, &header); + if (header.extension.absoluteSendTime != 0) + ++non_zero_abs_send_time; + if (header.extension.transmissionTimeOffset != 0) + ++non_zero_ts_offsets; + rbe->IncomingPacket(clock.TimeInMilliseconds(), + packet_length - header.headerLength, + header); + ++packet_counter; + } + next_process_time_ms = rbe->TimeUntilNextProcess() + + clock.TimeInMilliseconds(); + if (next_process_time_ms <= clock.TimeInMilliseconds()) { + rbe->Process(); + } + int time_until_next_event = + std::min(next_process_time_ms, next_rtp_time_ms) - + clock.TimeInMilliseconds(); + clock.AdvanceTimeMilliseconds(std::max(time_until_next_event, 0)); + } + printf("Parsed %d packets\nTime passed: %u ms\n", packet_counter, + static_cast<uint32_t>(clock.TimeInMilliseconds())); + printf("Estimator used: %s\n", estimator_used.c_str()); + printf("Packets with non-zero absolute send time: %d\n", + non_zero_abs_send_time); + printf("Packets with non-zero timestamp offset: %d\n", + non_zero_ts_offsets); + return 0; +} diff --git a/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc new file mode 100644 index 000000000000..2c69f25a1f5f --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include <stdio.h> + +#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h" +#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h" +#include "webrtc/modules/video_coding/main/test/rtp_player.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +using webrtc::rtpplayer::RtpPacketSourceInterface; + +int main(int argc, char** argv) { + if (argc < 5) { + printf("Usage: rtp_to_text <extension type> <extension id> <input_file.rtp>" + " <output_file.txt>\n"); + printf("<extension type> can either be:\n" + "  abs for absolute send time or\n" + "  tsoffset for timestamp offset.\n" + "<extension id> is the id associated with the extension.\n"); + return -1; + } + RtpPacketSourceInterface* reader; + webrtc::RtpHeaderParser* parser; + if (!ParseArgsAndSetupEstimator(argc, argv, NULL, NULL, &reader, &parser, + NULL, NULL)) { + return -1; + } + webrtc::scoped_ptr<RtpPacketSourceInterface> rtp_reader(reader); + webrtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(parser); + + FILE* out_file = fopen(argv[4], "wt"); + if (!out_file) { + printf("Cannot open output file %s\n", argv[4]); + return -1; + } + printf("Output file: %s\n\n", argv[4]); + fprintf(out_file, "seqnum timestamp ts_offset abs_sendtime recvtime " + "markerbit ssrc size\n"); + int packet_counter = 0; + static const uint32_t kMaxPacketSize = 1500; + uint8_t packet_buffer[kMaxPacketSize]; + uint8_t* packet = packet_buffer; + uint32_t packet_length = kMaxPacketSize; + uint32_t time_ms = 0; + int non_zero_abs_send_time = 0; + int non_zero_ts_offsets = 0; + while (rtp_reader->NextPacket(packet, &packet_length, &time_ms) == 0) { + webrtc::RTPHeader header = {}; + parser->Parse(packet, packet_length, &header); + if (header.extension.absoluteSendTime != 0) + ++non_zero_abs_send_time; + if (header.extension.transmissionTimeOffset != 0) + ++non_zero_ts_offsets; + fprintf(out_file, "%u %u %d %u %u %d %u %u\n", header.sequenceNumber, + header.timestamp, header.extension.transmissionTimeOffset, + header.extension.absoluteSendTime, time_ms, header.markerBit, + header.ssrc, packet_length); + packet_length = kMaxPacketSize; + ++packet_counter; + } + printf("Parsed %d packets\n", packet_counter); + printf("Packets with non-zero absolute send time: %d\n", + non_zero_abs_send_time); + printf("Packets with non-zero timestamp offset: %d\n", + non_zero_ts_offsets); + return 0; +} diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/receive_statistics.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/receive_statistics.h index 707adaa0cd3b..6f2ea4fb3e69 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/receive_statistics.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/receive_statistics.h @@ -23,24 +23,9 @@ class Clock; class StreamStatistician { public: - struct Statistics { - Statistics() - : fraction_lost(0), - cumulative_lost(0), - extended_max_sequence_number(0), - jitter(0), - max_jitter(0) {} - - uint8_t fraction_lost; - uint32_t cumulative_lost; - uint32_t extended_max_sequence_number; - uint32_t jitter; - uint32_t max_jitter; - }; - virtual ~StreamStatistician(); - virtual bool GetStatistics(Statistics* statistics, bool reset) = 0; + virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) = 0; virtual void GetDataCounters(uint32_t* bytes_received, uint32_t* packets_received) const = 0; virtual uint32_t BitrateReceived() const = 0; @@ -66,9 +51,13 @@ class ReceiveStatistics : public Module { static ReceiveStatistics* Create(Clock* clock); // Updates the receive statistics with this packet.
- virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes, + virtual void IncomingPacket(const RTPHeader& rtp_header, + size_t bytes, bool retransmitted) = 0; + // Increment counter for number of FEC packets received. + virtual void FecPacketReceived(uint32_t ssrc) = 0; + // Returns a map of all statisticians which have seen an incoming packet // during the last two seconds. virtual StatisticianMap GetActiveStatisticians() const = 0; @@ -78,17 +67,31 @@ class ReceiveStatistics : public Module { // Sets the max reordering threshold in number of packets. virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0; + + // Called on new RTCP stats creation. + virtual void RegisterRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) = 0; + + // Called on new RTP stats creation. + virtual void RegisterRtpStatisticsCallback( + StreamDataCountersCallback* callback) = 0; }; class NullReceiveStatistics : public ReceiveStatistics { public: - virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes, + virtual void IncomingPacket(const RTPHeader& rtp_header, + size_t bytes, bool retransmitted) OVERRIDE; + virtual void FecPacketReceived(uint32_t ssrc) OVERRIDE; virtual StatisticianMap GetActiveStatisticians() const OVERRIDE; virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE; virtual int32_t TimeUntilNextProcess() OVERRIDE; virtual int32_t Process() OVERRIDE; virtual void SetMaxReorderingThreshold(int max_reordering_threshold) OVERRIDE; + virtual void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback) + OVERRIDE; + virtual void RegisterRtpStatisticsCallback( + StreamDataCountersCallback* callback) OVERRIDE; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h index 0f16c57e871f..1c434828cdb1 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h @@ -63,7 +63,7 @@ class RtpRtcp : public Module { RtcpFeedback* rtcp_feedback; RtcpIntraFrameObserver* intra_frame_callback; RtcpBandwidthObserver* bandwidth_callback; - RtcpRttObserver* rtt_observer; + RtcpRttStats* rtt_stats; RtpAudioFeedback* audio_messages; RemoteBitrateEstimator* remote_bitrate_estimator; PacedSender* paced_sender; @@ -251,7 +251,7 @@ class RtpRtcp : public Module { /* * Turn on/off sending RTX (RFC 4588) on a specific SSRC. */ - virtual int32_t SetRTXSendStatus(RtxMode mode, bool set_ssrc, + virtual int32_t SetRTXSendStatus(int modes, bool set_ssrc, uint32_t ssrc) = 0; // Sets the payload type to use when sending RTX packets. Note that this @@ -261,7 +261,7 @@ class RtpRtcp : public Module { /* * Get status of sending RTX (RFC 4588) on a specific SSRC. */ - virtual int32_t RTXSendStatus(RtxMode* mode, uint32_t* ssrc, + virtual int32_t RTXSendStatus(int* modes, uint32_t* ssrc, int* payloadType) const = 0; /* @@ -300,6 +300,13 @@ class RtpRtcp : public Module { uint32_t* fecRate, uint32_t* nackRate) const = 0; + /* + * Called on any new send bitrate estimate. + */ + virtual void RegisterVideoBitrateObserver( + BitrateStatisticsObserver* observer) = 0; + virtual BitrateStatisticsObserver* GetVideoBitrateObserver() const = 0; + /* * Used by the codec module to deliver a video or audio frame for * packetization. 
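The rtt_observer to rtt_stats switch in the Configuration struct above pairs with the removal of SetRtt() further down: instead of having an externally computed RTT pushed into a receive-only module, callers now inject an RtcpRttStats object that both receives updates and can be queried for the last processed value. A minimal sketch of such an object, based only on the RtcpRttStats interface declared in rtp_rtcp_defines.h below; the class name and the unsynchronized caching are illustrative assumptions, not part of this patch:

#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"

// Hypothetical sketch: caches the most recent RTT estimate so that
// receive-only modules can pull it on demand.
class CachedRttStats : public webrtc::RtcpRttStats {
 public:
  CachedRttStats() : rtt_ms_(0) {}

  // Invoked by the RTCP module whenever a new RTT estimate is computed.
  virtual void OnRttUpdate(uint32_t rtt) { rtt_ms_ = rtt; }

  // Returns the last value handed to OnRttUpdate().
  virtual uint32_t LastProcessedRtt() const { return rtt_ms_; }

 private:
  uint32_t rtt_ms_;  // A real implementation would guard this with a lock.
};

Such an object would be assigned to Configuration::rtt_stats before constructing the module.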
@@ -324,11 +331,26 @@ class RtpRtcp : public Module { const RTPFragmentationHeader* fragmentation = NULL, const RTPVideoHeader* rtpVideoHdr = NULL) = 0; - virtual bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number, - int64_t capture_time_ms) = 0; + virtual bool TimeToSendPacket(uint32_t ssrc, + uint16_t sequence_number, + int64_t capture_time_ms, + bool retransmission) = 0; virtual int TimeToSendPadding(int bytes) = 0; + virtual void RegisterSendFrameCountObserver( + FrameCountObserver* observer) = 0; + virtual FrameCountObserver* GetSendFrameCountObserver() const = 0; + + virtual bool GetSendSideDelay(int* avg_send_delay_ms, + int* max_send_delay_ms) const = 0; + + // Called on generation of new statistics after an RTP send. + virtual void RegisterSendChannelRtpStatisticsCallback( + StreamDataCountersCallback* callback) = 0; + virtual StreamDataCountersCallback* + GetSendChannelRtpStatisticsCallback() const = 0; + /************************************************************************** * * RTCP @@ -418,12 +440,6 @@ class RtpRtcp : public Module { */ virtual int32_t ResetRTT(const uint32_t remoteSSRC)= 0 ; - /* - * Sets the estimated RTT, to be used for receive only modules without - * possibility of calculating its own RTT. - */ - virtual void SetRtt(uint32_t rtt) = 0; - /* * Get time of last rr, as well as packets received remotely * (derived from rr report + cached sender-side info). @@ -520,6 +536,13 @@ class RtpRtcp : public Module { virtual int32_t SetRTCPVoIPMetrics( const RTCPVoIPMetric* VoIPMetric) = 0; + /* + * (XR) Receiver Reference Time Report + */ + virtual void SetRtcpXrRrtrStatus(bool enable) = 0; + + virtual bool RtcpXrRrtrStatus() const = 0; + /* * (REMB) Receiver Estimated Max Bitrate */ @@ -594,6 +617,12 @@ class RtpRtcp : public Module { // Returns true if the module is configured to store packets. virtual bool StorePackets() const = 0; + // Called on receipt of RTCP report block from remote side. + virtual void RegisterSendChannelRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) = 0; + virtual RtcpStatisticsCallback* + GetSendChannelRtcpStatisticsCallback() = 0; + /************************************************************************** * * Audio diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h index de5fcb2f5bc2..6f99f938de6d 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h @@ -27,6 +27,9 @@ namespace webrtc { const int kVideoPayloadTypeFrequency = 90000; +// Minimum RTP header size in bytes. +const uint8_t kRtpHeaderSize = 12; + struct AudioPayload { uint32_t frequency; @@ -96,7 +99,9 @@ enum RTCPPacketType kRtcpSli = 0x4000, kRtcpRpsi = 0x8000, kRtcpRemb = 0x10000, - kRtcpTransmissionTimeOffset = 0x20000 + kRtcpTransmissionTimeOffset = 0x20000, + kRtcpXrReceiverReferenceTime = 0x40000, + kRtcpXrDlrrReportBlock = 0x80000 }; enum KeyFrameRequestMethod @@ -127,9 +132,10 @@ enum RetransmissionMode { }; enum RtxMode { - kRtxOff = 0, - kRtxRetransmitted = 1, // Apply RTX only to retransmitted packets. - kRtxAll = 2 // Apply RTX to all packets (source + retransmissions). + kRtxOff = 0x0, + kRtxRetransmitted = 0x1, // Only send retransmissions over RTX. + kRtxRedundantPayloads = 0x2 // Preventively send redundant payloads + // instead of padding. 
 };
 
 const int kRtxHeaderSize = 2;
@@ -177,11 +183,20 @@ struct RTCPReportBlock {
   uint32_t delaySinceLastSR;
 };
 
+struct RtcpReceiveTimeInfo {
+  // Fields as described by RFC 3611 4.5.
+  uint32_t sourceSSRC;
+  uint32_t lastRR;
+  uint32_t delaySinceLastRR;
+};
+
 typedef std::list<RTCPReportBlock> ReportBlockList;
 
 class RtpData
 {
 public:
+  virtual ~RtpData() {}
+
   virtual int32_t OnReceivedPayloadData(
       const uint8_t* payloadData,
       const uint16_t payloadSize,
@@ -189,8 +204,6 @@ public:
 
   virtual bool OnRecoveredPacket(const uint8_t* packet,
                                  int packet_length) = 0;
-
-protected:
-  virtual ~RtpData() {}
 };
 
 class RtcpFeedback
@@ -216,6 +229,8 @@ protected:
 class RtpFeedback
 {
 public:
+  virtual ~RtpFeedback() {}
+
   // Receiving payload change or SSRC change. (return success!)
   /*
   *   channels    - number of channels in codec (1 = mono, 2 = stereo)
@@ -236,9 +251,6 @@ public:
       const bool added) = 0;
 
   virtual void ResetStatistics(uint32_t ssrc) = 0;
-
-protected:
-  virtual ~RtpFeedback() {}
 };
 
 class RtpAudioFeedback {
@@ -280,11 +292,13 @@ class RtcpBandwidthObserver {
   virtual ~RtcpBandwidthObserver() {}
 };
 
-class RtcpRttObserver {
+class RtcpRttStats {
  public:
   virtual void OnRttUpdate(uint32_t rtt) = 0;
 
-  virtual ~RtcpRttObserver() {};
+  virtual uint32_t LastProcessedRtt() const = 0;
+
+  virtual ~RtcpRttStats() {};
 };
 
 // Null object version of RtpFeedback.
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 608fe351abb0..42c7b4eab91e 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -85,9 +85,9 @@ class MockRtpRtcp : public RtpRtcp {
   MOCK_METHOD1(SetCSRCStatus,
       int32_t(const bool include));
   MOCK_METHOD3(SetRTXSendStatus,
-      int32_t(RtxMode mode, bool setSSRC, uint32_t ssrc));
+      int32_t(int modes, bool setSSRC, uint32_t ssrc));
   MOCK_CONST_METHOD3(RTXSendStatus,
-      int32_t(RtxMode* mode, uint32_t* ssrc, int* payload_type));
+      int32_t(int* modes, uint32_t* ssrc, int* payload_type));
   MOCK_METHOD1(SetRtxSendPayloadType, void(int));
   MOCK_METHOD1(SetSendingStatus,
@@ -100,6 +100,8 @@
       bool());
   MOCK_CONST_METHOD4(BitrateSent,
       void(uint32_t* totalRate, uint32_t* videoRate, uint32_t* fecRate,
           uint32_t* nackRate));
+  MOCK_METHOD1(RegisterVideoBitrateObserver, void(BitrateStatisticsObserver*));
+  MOCK_CONST_METHOD0(GetVideoBitrateObserver, BitrateStatisticsObserver*(void));
   MOCK_CONST_METHOD1(EstimatedReceiveBandwidth,
       int(uint32_t* available_bandwidth));
   MOCK_METHOD8(SendOutgoingData,
@@ -111,10 +113,13 @@
           const uint32_t payloadSize,
           const RTPFragmentationHeader* fragmentation,
           const RTPVideoHeader* rtpVideoHdr));
-  MOCK_METHOD3(TimeToSendPacket,
-      bool(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms));
+  MOCK_METHOD4(TimeToSendPacket,
+      bool(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms,
+           bool retransmission));
   MOCK_METHOD1(TimeToSendPadding,
       int(int bytes));
+  MOCK_CONST_METHOD2(GetSendSideDelay,
+      bool(int* avg_send_delay_ms, int* max_send_delay_ms));
   MOCK_METHOD3(RegisterRtcpObservers,
       void(RtcpIntraFrameObserver* intraFrameCallback,
            RtcpBandwidthObserver* bandwidthCallback,
@@ -145,7 +150,6 @@
       int32_t(const uint32_t remoteSSRC, uint16_t* RTT, uint16_t* avgRTT,
               uint16_t* minRTT, uint16_t* maxRTT));
   MOCK_METHOD1(ResetRTT,
      int32_t(const uint32_t remoteSSRC));
-  MOCK_METHOD1(SetRtt, void(uint32_t
rtt)); MOCK_METHOD1(SendRTCP, int32_t(uint32_t rtcpPacketType)); MOCK_METHOD1(SendRTCPReferencePictureSelection, @@ -168,6 +172,10 @@ class MockRtpRtcp : public RtpRtcp { int32_t(const uint8_t subType, const uint32_t name, const uint8_t* data, const uint16_t length)); MOCK_METHOD1(SetRTCPVoIPMetrics, int32_t(const RTCPVoIPMetric* VoIPMetric)); + MOCK_METHOD1(SetRtcpXrRrtrStatus, + void(bool enable)); + MOCK_CONST_METHOD0(RtcpXrRrtrStatus, + bool()); MOCK_CONST_METHOD0(REMB, bool()); MOCK_METHOD1(SetREMBStatus, @@ -197,6 +205,10 @@ class MockRtpRtcp : public RtpRtcp { MOCK_METHOD2(SetStorePacketsStatus, int32_t(const bool enable, const uint16_t numberToStore)); MOCK_CONST_METHOD0(StorePackets, bool()); + MOCK_METHOD1(RegisterSendChannelRtcpStatisticsCallback, + void(RtcpStatisticsCallback*)); + MOCK_METHOD0(GetSendChannelRtcpStatisticsCallback, + RtcpStatisticsCallback*()); MOCK_METHOD1(RegisterAudioCallback, int32_t(RtpAudioFeedback* messagesCallback)); MOCK_METHOD1(SetAudioPacketSize, @@ -236,7 +248,14 @@ class MockRtpRtcp : public RtpRtcp { int32_t()); MOCK_METHOD0(Process, int32_t()); - + MOCK_METHOD1(RegisterSendFrameCountObserver, + void(FrameCountObserver*)); + MOCK_CONST_METHOD0(GetSendFrameCountObserver, + FrameCountObserver*(void)); + MOCK_METHOD1(RegisterSendChannelRtpStatisticsCallback, + void(StreamDataCountersCallback*)); + MOCK_CONST_METHOD0(GetSendChannelRtpStatisticsCallback, + StreamDataCountersCallback*(void)); // Members. unsigned int remote_ssrc_; }; diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/h264_information.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/h264_information.h index 42b0b88393eb..356a026ec6f2 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/h264_information.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/h264_information.h @@ -12,7 +12,7 @@ #define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_ #include "VideoCodecInformation.h" -#include "typedefs.h" +#include "webrtc/typedefs.h" namespace webrtc { enum diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/rtp_sender_h264.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/rtp_sender_h264.h index 92a4de6d8fa5..ea385d4596b0 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/rtp_sender_h264.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/H264/rtp_sender_h264.h @@ -11,7 +11,7 @@ #ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_ #define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_ -#include "typedefs.h" +#include "webrtc/typedefs.h" #include "ModuleRTPRTCPConfig.h" #include "rtp_rtcp_defines.h" #include "h264_information.h" diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.cc index d8145d16d76c..11b3cb29d5fc 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.cc @@ -15,7 +15,7 @@ namespace webrtc { -Bitrate::Bitrate(Clock* clock) +Bitrate::Bitrate(Clock* clock, Observer* observer) : clock_(clock), crit_(CriticalSectionWrapper::CreateCriticalSection()), packet_rate_(0), @@ -23,12 +23,15 @@ Bitrate::Bitrate(Clock* clock) bitrate_next_idx_(0), time_last_rate_update_(0), bytes_count_(0), - packet_count_(0) { + packet_count_(0), + observer_(observer) { memset(packet_rate_array_, 0, sizeof(packet_rate_array_)); memset(bitrate_diff_ms_, 0, sizeof(bitrate_diff_ms_)); memset(bitrate_array_, 0, 
          sizeof(bitrate_array_));
 }
 
+Bitrate::~Bitrate() {}
+
 void Bitrate::Update(const int32_t bytes) {
   CriticalSectionScoped cs(crit_.get());
   bytes_count_ += bytes;
@@ -68,43 +71,53 @@ int64_t Bitrate::time_last_rate_update() const {
   return time_last_rate_update_;
 }
 
+// Triggered by timer.
 void Bitrate::Process() {
-  // Triggered by timer.
-  CriticalSectionScoped cs(crit_.get());
-  int64_t now = clock_->TimeInMilliseconds();
-  int64_t diff_ms = now - time_last_rate_update_;
+  BitrateStatistics stats;
+  {
+    CriticalSectionScoped cs(crit_.get());
+    int64_t now = clock_->CurrentNtpInMilliseconds();
+    int64_t diff_ms = now - time_last_rate_update_;
 
-  if (diff_ms < 100) {
-    // Not enough data, wait...
-    return;
-  }
-  if (diff_ms > 10000) {  // 10 seconds.
-    // Too high difference, ignore.
+    if (diff_ms < 100) {
+      // Not enough data, wait...
+      return;
+    }
+    if (diff_ms > 10000) {  // 10 seconds.
+      // Too high difference, ignore.
+      time_last_rate_update_ = now;
+      bytes_count_ = 0;
+      packet_count_ = 0;
+      return;
+    }
+    packet_rate_array_[bitrate_next_idx_] = (packet_count_ * 1000) / diff_ms;
+    bitrate_array_[bitrate_next_idx_] = 8 * ((bytes_count_ * 1000) / diff_ms);
+    bitrate_diff_ms_[bitrate_next_idx_] = diff_ms;
+    bitrate_next_idx_++;
+    if (bitrate_next_idx_ >= 10) {
+      bitrate_next_idx_ = 0;
+    }
+    int64_t sum_diffMS = 0;
+    int64_t sum_bitrateMS = 0;
+    int64_t sum_packetrateMS = 0;
+    for (int i = 0; i < 10; i++) {
+      sum_diffMS += bitrate_diff_ms_[i];
+      sum_bitrateMS += bitrate_array_[i] * bitrate_diff_ms_[i];
+      sum_packetrateMS += packet_rate_array_[i] * bitrate_diff_ms_[i];
+    }
     time_last_rate_update_ = now;
     bytes_count_ = 0;
     packet_count_ = 0;
-    return;
+    packet_rate_ = static_cast<uint32_t>(sum_packetrateMS / sum_diffMS);
+    bitrate_ = static_cast<uint32_t>(sum_bitrateMS / sum_diffMS);
+
+    stats.bitrate_bps = bitrate_;
+    stats.packet_rate = packet_rate_;
+    stats.timestamp_ms = now;
   }
-  packet_rate_array_[bitrate_next_idx_] = (packet_count_ * 1000) / diff_ms;
-  bitrate_array_[bitrate_next_idx_] = 8 * ((bytes_count_ * 1000) / diff_ms);
-  bitrate_diff_ms_[bitrate_next_idx_] = diff_ms;
-  bitrate_next_idx_++;
-  if (bitrate_next_idx_ >= 10) {
-    bitrate_next_idx_ = 0;
-  }
-  int64_t sum_diffMS = 0;
-  int64_t sum_bitrateMS = 0;
-  int64_t sum_packetrateMS = 0;
-  for (int i = 0; i < 10; i++) {
-    sum_diffMS += bitrate_diff_ms_[i];
-    sum_bitrateMS += bitrate_array_[i] * bitrate_diff_ms_[i];
-    sum_packetrateMS += packet_rate_array_[i] * bitrate_diff_ms_[i];
-  }
-  time_last_rate_update_ = now;
-  bytes_count_ = 0;
-  packet_count_ = 0;
-  packet_rate_ = static_cast<uint32_t>(sum_packetrateMS / sum_diffMS);
-  bitrate_ = static_cast<uint32_t>(sum_bitrateMS / sum_diffMS);
+
+  if (observer_)
+    observer_->BitrateUpdated(stats);
 }
 
 }  // namespace webrtc
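Bitrate::Process() now snapshots the refreshed ten-slot moving average into a BitrateStatistics struct inside the lock and forwards it to the optional observer outside the lock; passing NULL for the observer (as incoming_bitrate_(clock, NULL) does later in this patch) disables the callback. A hedged sketch of a consumer, assuming only the three fields the patch itself assigns (bitrate_bps, packet_rate, timestamp_ms); the class name, field types, and printf formatting are assumptions:

#include <stdio.h>

// Illustrative sketch, not part of this patch: logs every statistics
// snapshot that Bitrate::Process() forwards to its observer.
class LoggingBitrateObserver : public webrtc::Bitrate::Observer {
 public:
  virtual void BitrateUpdated(const webrtc::BitrateStatistics& stats) {
    // timestamp_ms is cast for portable printf formatting.
    printf("t=%lld ms: %u bps, %u packets/s\n",
           static_cast<long long>(stats.timestamp_ms),
           stats.bitrate_bps, stats.packet_rate);
  }
};

Usage would follow the new constructor: LoggingBitrateObserver observer; webrtc::Bitrate bitrate(clock, &observer);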
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.h
index 17a099a86a61..36fa1d37175b 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.h
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/bitrate.h
@@ -27,7 +27,9 @@ class CriticalSectionWrapper;
 
 class Bitrate {
  public:
-  explicit Bitrate(Clock* clock);
+  class Observer;
+  Bitrate(Clock* clock, Observer* observer);
+  virtual ~Bitrate();
 
   // Calculates rates.
   void Process();
@@ -46,6 +48,14 @@ class Bitrate {
 
   int64_t time_last_rate_update() const;
 
+  class Observer {
+   public:
+    Observer() {}
+    virtual ~Observer() {}
+
+    virtual void BitrateUpdated(const BitrateStatistics& stats) = 0;
+  };
+
  protected:
   Clock* clock_;
@@ -60,6 +70,7 @@ class Bitrate {
   int64_t time_last_rate_update_;
   uint32_t bytes_count_;
   uint32_t packet_count_;
+  Observer* const observer_;
 };
 
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/byte_io.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/byte_io.h
new file mode 100644
index 000000000000..646f1eb55d9a
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/byte_io.h
@@ -0,0 +1,238 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BYTE_IO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BYTE_IO_H_
+
+
+// This file contains classes for reading and writing integer types from/to
+// byte array representations. Signed/unsigned, partial (whole byte) sizes,
+// and big/little endian byte order are all supported.
+//
+// Usage examples:
+//
+// uint8_t* buffer = ...;
+//
+// // Read an unsigned 4 byte integer in big endian format
+// uint32_t val = ByteReader<uint32_t>::ReadBigEndian(buffer);
+//
+// // Read a signed 24-bit (3 byte) integer in little endian format
+// int32_t val = ByteReader<int32_t, 3>::ReadLittleEndian(buffer);
+//
+// // Write an unsigned 8 byte integer in little endian format
+// ByteWriter<uint64_t>::WriteLittleEndian(buffer, val);
+//
+// // Write an unsigned 40-bit (5 byte) integer in big endian format
+// ByteWriter<uint64_t, 5>::WriteBigEndian(buffer, val);
+//
+// These classes are implemented as recursive template specializations,
+// intended to make it easy for the compiler to completely inline the
+// reading/writing.
+
+
+#include <limits>
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Class for reading integers from a sequence of bytes.
+// T = type of integer, B = bytes to read, is_signed = true if signed integer
+// If is_signed is true and B < sizeof(T), sign extension might be needed
+template<typename T, unsigned int B = sizeof(T),
+         bool is_signed = std::numeric_limits<T>::is_signed>
+class ByteReader {
+ public:
+  static T ReadBigEndian(uint8_t* data) {
+    if (is_signed && B < sizeof(T)) {
+      return SignExtend(InternalReadBigEndian(data));
+    }
+    return InternalReadBigEndian(data);
+  }
+
+  static T ReadLittleEndian(uint8_t* data) {
+    if (is_signed && B < sizeof(T)) {
+      return SignExtend(InternalReadLittleEndian(data));
+    }
+    return InternalReadLittleEndian(data);
+  }
+
+ private:
+  static T InternalReadBigEndian(uint8_t* data) {
+    T val(0);
+    for (unsigned int i = 0; i < B; ++i) {
+      val |= static_cast<T>(data[i]) << ((B - 1 - i) * 8);
+    }
+    return val;
+  }
+
+  static T InternalReadLittleEndian(uint8_t* data) {
+    T val(0);
+    for (unsigned int i = 0; i < B; ++i) {
+      val |= static_cast<T>(data[i]) << (i * 8);
+    }
+    return val;
+  }
+
+  // If the number of bytes is less than the native data type (e.g. 24 bits
+  // stored in an int32_t), and the most significant bit of the actual data
+  // is set, we must sign extend the remaining byte(s) with ones so that the
+  // correct negative number is retained.
+  // Ex: 0x810A0B -> 0xFF810A0B, but 0x710A0B -> 0x00710A0B
+  static T SignExtend(T val) {
+    uint8_t msb = static_cast<uint8_t>(val >> ((B - 1) * 8));
+    if (msb & 0x80) {
+      // Sign extension is -1 (all ones) shifted left B bytes.
+      // The "B % sizeof(T)"-part is there to avoid compiler warning for
+      // shifting the whole size of the data type.
+      T sign_extend = (sizeof(T) == B ? 0 :
+          (static_cast<T>(-1L) << ((B % sizeof(T)) * 8)));
+
+      return val | sign_extend;
+    }
+    return val;
+  }
+};
+
+// Class for writing integers to a sequence of bytes
+// T = type of integer, B = bytes to write
+template<typename T, unsigned int B = sizeof(T)>
+class ByteWriter {
+ public:
+  static void WriteBigEndian(uint8_t* data, T val) {
+    for (unsigned int i = 0; i < B; ++i) {
+      data[i] = val >> ((B - 1 - i) * 8);
+    }
+  }
+
+  static void WriteLittleEndian(uint8_t* data, T val) {
+    for (unsigned int i = 0; i < B; ++i) {
+      data[i] = val >> (i * 8);
+    }
+  }
+};
+
+
+// -------- Below follows specializations for B in { 2, 4, 8 } --------
+
+
+// Specializations for two byte words
+template<typename T>
+class ByteReader<T, 2, false> {
+ public:
+  static T ReadBigEndian(uint8_t* data) {
+    return (data[0] << 8) | data[1];
+  }
+
+  static T ReadLittleEndian(uint8_t* data) {
+    return data[0] | (data[1] << 8);
+  }
+};
+
+template<typename T>
+class ByteWriter<T, 2> {
+ public:
+  static void WriteBigEndian(uint8_t* data, T val) {
+    data[0] = val >> 8;
+    data[1] = val;
+  }
+
+  static void WriteLittleEndian(uint8_t* data, T val) {
+    data[0] = val;
+    data[1] = val >> 8;
+  }
+};
+
+// Specializations for four byte words.
+template<typename T>
+class ByteReader<T, 4, false> {
+ public:
+  static T ReadBigEndian(uint8_t* data) {
+    return (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3];
+  }
+
+  static T ReadLittleEndian(uint8_t* data) {
+    return data[0] | (data[1] << 8) | (data[2] << 16) | (data[3] << 24);
+  }
+};
+
+// Specializations for four byte words.
+template<typename T>
+class ByteWriter<T, 4> {
+ public:
+  static void WriteBigEndian(uint8_t* data, T val) {
+    data[0] = val >> 24;
+    data[1] = val >> 16;
+    data[2] = val >> 8;
+    data[3] = val;
+  }
+
+  static void WriteLittleEndian(uint8_t* data, T val) {
+    data[0] = val;
+    data[1] = val >> 8;
+    data[2] = val >> 16;
+    data[3] = val >> 24;
+  }
+};
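The specializations exist so the common word sizes compile down to straight shifts, while the generic template handles the odd widths (24- and 40-bit fields) that RTP/RTCP wire formats use. A small self-contained example exercising the API exactly as the header's own usage comment describes; the values are illustrative:

#include "webrtc/modules/rtp_rtcp/source/byte_io.h"

void ByteIoExample() {
  uint8_t buf[8] = {0};

  // Write 0x123456 as a 24-bit big endian value (generic template, B = 3).
  webrtc::ByteWriter<uint32_t, 3>::WriteBigEndian(buf, 0x123456);
  // buf now holds 0x12, 0x34, 0x56.

  // Read it back as unsigned; yields 0x123456.
  uint32_t u = webrtc::ByteReader<uint32_t, 3>::ReadBigEndian(buf);

  // Reading the same three bytes as a signed 24-bit value takes the
  // SignExtend() path only when the top bit is set; 0x12 has it clear,
  // so the result is still the positive value 0x123456.
  int32_t s = webrtc::ByteReader<int32_t, 3>::ReadBigEndian(buf);
  (void)u;
  (void)s;
}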
+// Specializations for eight byte words.
+template<typename T>
+class ByteReader<T, 8, false> {
+ public:
+  static T ReadBigEndian(uint8_t* data) {
+    return
+        (Get(data, 0) << 56) | (Get(data, 1) << 48) |
+        (Get(data, 2) << 40) | (Get(data, 3) << 32) |
+        (Get(data, 4) << 24) | (Get(data, 5) << 16) |
+        (Get(data, 6) << 8)  |  Get(data, 7);
+  }
+
+  static T ReadLittleEndian(uint8_t* data) {
+    return
+         Get(data, 0)        | (Get(data, 1) << 8)  |
+        (Get(data, 2) << 16) | (Get(data, 3) << 24) |
+        (Get(data, 4) << 32) | (Get(data, 5) << 40) |
+        (Get(data, 6) << 48) | (Get(data, 7) << 56);
+  }
+
+ private:
+  inline static T Get(uint8_t* data, unsigned int index) {
+    return static_cast<T>(data[index]);
+  }
+};
+
+template<typename T>
+class ByteWriter<T, 8> {
+ public:
+  static void WriteBigEndian(uint8_t* data, T val) {
+    data[0] = val >> 56;
+    data[1] = val >> 48;
+    data[2] = val >> 40;
+    data[3] = val >> 32;
+    data[4] = val >> 24;
+    data[5] = val >> 16;
+    data[6] = val >> 8;
+    data[7] = val;
+  }
+
+  static void WriteLittleEndian(uint8_t* data, T val) {
+    data[0] = val;
+    data[1] = val >> 8;
+    data[2] = val >> 16;
+    data[3] = val >> 24;
+    data[4] = val >> 32;
+    data[5] = val >> 40;
+    data[6] = val >> 48;
+    data[7] = val >> 56;
+  }
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_BYTE_IO_H_
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/byte_io_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/byte_io_unittest.cc
new file mode 100644
index 000000000000..5b7010994e45
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/byte_io_unittest.cc
@@ -0,0 +1,210 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace {
+
+class ByteIoTest : public ::testing::Test {
+ protected:
+  ByteIoTest() {}
+  virtual ~ByteIoTest() {}
+
+  enum { kAlignments = sizeof(uint64_t) - 1 };
+
+  // Method to create a test value that is not the same when byte reversed.
+  template<typename T>
+  T CreateTestValue(bool negative, uint8_t num_bytes) {
+    T val = 0;
+    for (uint8_t i = 0; i != num_bytes; ++i) {
+      val = (val << 8) + (negative ? (0xFF - i) : (i + 1));
+    }
+    if (negative && std::numeric_limits<T>::is_signed) {
+      val |= static_cast<T>(-1) << (8 * num_bytes);
+    }
+    return val;
+  }
+
+  // Populate byte buffer with value, in big endian format.
+  template<typename T>
+  void PopulateTestData(uint8_t* data, T value, int num_bytes, bool bigendian) {
+    if (bigendian) {
+      for (int i = 0; i < num_bytes; ++i) {
+        data[i] = (value >> ((num_bytes - i - 1) * 8)) & 0xFF;
+      }
+    } else {
+      for (int i = 0; i < num_bytes; ++i) {
+        data[i] = (value >> (i * 8)) & 0xFF;
+      }
+    }
+  }
+
+  // Test reading big endian numbers.
+  // Template arguments: Type T, read method RM(buffer), B bytes of data.
+  template<typename T, T (*RM)(uint8_t*), int B>
+  void TestRead(bool big_endian) {
+    // Test both for values that are positive and negative (if signed)
+    for (int neg = 0; neg < 2; ++neg) {
+      bool negative = neg > 0;
+
+      // Write test value to byte buffer, in big endian format.
+      T test_value = CreateTestValue<T>(negative, B);
+      uint8_t bytes[B + kAlignments];
+
+      // Make one test for each alignment.
+      for (int i = 0; i < kAlignments; ++i) {
+        PopulateTestData(bytes + i, test_value, B, big_endian);
+
+        // Check that the test value is retrieved from the buffer when using
+        // the read method.
+        EXPECT_EQ(test_value, RM(bytes + i));
+      }
+    }
+  }
+
+  // Test writing big endian numbers.
+  // Template arguments: Type T, write method WM(buffer, value), B bytes of
+  // data.
+  template<typename T, void (*WM)(uint8_t*, T), int B>
+  void TestWrite(bool big_endian) {
+    // Test both for values that are positive and negative (if signed).
+    for (int neg = 0; neg < 2; ++neg) {
+      bool negative = neg > 0;
+
+      // Write test value to byte buffer, in big endian format.
+      T test_value = CreateTestValue<T>(negative, B);
+      uint8_t expected_bytes[B + kAlignments];
+      uint8_t bytes[B + kAlignments];
+
+      // Make one test for each alignment.
+      for (int i = 0; i < kAlignments; ++i) {
+        PopulateTestData(expected_bytes + i, test_value, B, big_endian);
+
+        // Zero initialize buffer and let WM populate it.
+        memset(bytes, 0, B + kAlignments);
+        WM(bytes + i, test_value);
+
+        // Check that data produced by WM is big endian as expected.
+        for (int j = 0; j < B; ++j) {
+          EXPECT_EQ(expected_bytes[i + j], bytes[i + j]);
+        }
+      }
+    }
+  }
+};
+
+TEST_F(ByteIoTest, Test16UBitBigEndian) {
+  TestRead<uint16_t, ByteReader<uint16_t>::ReadBigEndian,
+      sizeof(uint16_t)>(true);
+  TestWrite<uint16_t, ByteWriter<uint16_t>::WriteBigEndian,
+      sizeof(uint16_t)>(true);
+}
+
+TEST_F(ByteIoTest, Test24UBitBigEndian) {
+  TestRead<uint32_t, ByteReader<uint32_t, 3>::ReadBigEndian, 3>(true);
+  TestWrite<uint32_t, ByteWriter<uint32_t, 3>::WriteBigEndian, 3>(true);
+}
+
+TEST_F(ByteIoTest, Test32UBitBigEndian) {
+  TestRead<uint32_t, ByteReader<uint32_t>::ReadBigEndian,
+      sizeof(uint32_t)>(true);
+  TestWrite<uint32_t, ByteWriter<uint32_t>::WriteBigEndian,
+      sizeof(uint32_t)>(true);
+}
+
+TEST_F(ByteIoTest, Test64UBitBigEndian) {
+  TestRead<uint64_t, ByteReader<uint64_t>::ReadBigEndian,
+      sizeof(uint64_t)>(true);
+  TestWrite<uint64_t, ByteWriter<uint64_t>::WriteBigEndian,
+      sizeof(uint64_t)>(true);
+}
+
+TEST_F(ByteIoTest, Test16SBitBigEndian) {
+  TestRead<int16_t, ByteReader<int16_t>::ReadBigEndian,
+      sizeof(int16_t)>(true);
+  TestWrite<int16_t, ByteWriter<int16_t>::WriteBigEndian,
+      sizeof(int16_t)>(true);
+}
+
+TEST_F(ByteIoTest, Test24SBitBigEndian) {
+  TestRead<int32_t, ByteReader<int32_t, 3>::ReadBigEndian, 3>(true);
+  TestWrite<int32_t, ByteWriter<int32_t, 3>::WriteBigEndian, 3>(true);
+}
+
+TEST_F(ByteIoTest, Test32SBitBigEndian) {
+  TestRead<int32_t, ByteReader<int32_t>::ReadBigEndian,
+      sizeof(int32_t)>(true);
+  TestWrite<int32_t, ByteWriter<int32_t>::WriteBigEndian,
+      sizeof(int32_t)>(true);
+}
+
+TEST_F(ByteIoTest, Test64SBitBigEndian) {
+  TestRead<int64_t, ByteReader<int64_t>::ReadBigEndian,
+      sizeof(int64_t)>(true);
+  TestWrite<int64_t, ByteWriter<int64_t>::WriteBigEndian,
+      sizeof(int64_t)>(true);
+}
+
+TEST_F(ByteIoTest, Test16UBitLittleEndian) {
+  TestRead<uint16_t, ByteReader<uint16_t>::ReadLittleEndian,
+      sizeof(uint16_t)>(false);
+  TestWrite<uint16_t, ByteWriter<uint16_t>::WriteLittleEndian,
+      sizeof(uint16_t)>(false);
+}
+
+TEST_F(ByteIoTest, Test24UBitLittleEndian) {
+  TestRead<uint32_t, ByteReader<uint32_t, 3>::ReadLittleEndian, 3>(false);
+  TestWrite<uint32_t, ByteWriter<uint32_t, 3>::WriteLittleEndian, 3>(false);
+}
+
+TEST_F(ByteIoTest, Test32UBitLittleEndian) {
+  TestRead<uint32_t, ByteReader<uint32_t>::ReadLittleEndian,
+      sizeof(uint32_t)>(false);
+  TestWrite<uint32_t, ByteWriter<uint32_t>::WriteLittleEndian,
+      sizeof(uint32_t)>(false);
+}
+
+TEST_F(ByteIoTest, Test64UBitLittleEndian) {
+  TestRead<uint64_t, ByteReader<uint64_t>::ReadLittleEndian,
+      sizeof(uint64_t)>(false);
+  TestWrite<uint64_t, ByteWriter<uint64_t>::WriteLittleEndian,
+      sizeof(uint64_t)>(false);
+}
+
+TEST_F(ByteIoTest, Test16SBitLittleEndian) {
+  TestRead<int16_t, ByteReader<int16_t>::ReadLittleEndian,
+      sizeof(int16_t)>(false);
+  TestWrite<int16_t, ByteWriter<int16_t>::WriteLittleEndian,
+      sizeof(int16_t)>(false);
+}
+
+TEST_F(ByteIoTest, Test24SBitLittleEndian) {
+  TestRead<int32_t, ByteReader<int32_t, 3>::ReadLittleEndian, 3>(false);
+  TestWrite<int32_t, ByteWriter<int32_t, 3>::WriteLittleEndian, 3>(false);
+}
+
+TEST_F(ByteIoTest, Test32SBitLittleEndian) {
+  TestRead<int32_t, ByteReader<int32_t>::ReadLittleEndian,
+      sizeof(int32_t)>(false);
+  TestWrite<int32_t, ByteWriter<int32_t>::WriteLittleEndian,
+      sizeof(int32_t)>(false);
+}
+
+TEST_F(ByteIoTest, Test64SBitLittleEndian) {
+  TestRead<int64_t, ByteReader<int64_t>::ReadLittleEndian,
+      sizeof(int64_t)>(false);
+  TestWrite<int64_t, ByteWriter<int64_t>::WriteLittleEndian,
+      sizeof(int64_t)>(false);
+}
+
+}  // namespace
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
index 0dc142f867c5..20be2d5dd389 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
@@ -223,6 +223,7 @@ int32_t FecReceiverImpl::ProcessReceivedFec() {
     crit_sect_->Enter();
   }
   if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
+    crit_sect_->Leave();
    return -1;
   }
   assert(received_packet_list_.empty());
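The fec_receiver_impl.cc hunk above fixes a lock leak: the DecodeFEC() error path previously returned while crit_sect_ was still held. The explicit Leave() works, but the CriticalSectionScoped RAII guard used throughout this patch rules out this class of bug entirely; a minimal sketch under the assumption that the lock could be scoped to the whole operation (the real function drops and re-takes the lock around callbacks, so this is illustrative only, and GuardedDecode is a hypothetical helper):

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"

// Sketch only, not part of this patch: the guard's destructor releases
// the lock on every path out of the function, so an early return cannot
// leak it.
int32_t GuardedDecode(webrtc::CriticalSectionWrapper* crit, bool (*decode)()) {
  webrtc::CriticalSectionScoped cs(crit);
  if (!decode())
    return -1;  // lock released here automatically
  return 0;     // and here
}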
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_test_helper.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
index e3c3581be733..e6426ea7eea7 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
@@ -16,9 +16,6 @@
 
 namespace webrtc {
 
-enum {
-  kRtpHeaderSize = 12
-};
 enum {
   kFecPayloadType = 96
 };
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
index d7b7877b9863..af2cb9e83443 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -11,21 +11,19 @@
 
 #include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
 
 #include <assert.h>
-#include <cstdlib>  // for abs()
+#include <stdlib.h>
 #include <algorithm>
 #include <iterator>
 
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
 #include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
 namespace webrtc {
 
-// Minimum RTP header size in bytes.
-const uint8_t kRtpHeaderSize = 12;
-
 // FEC header size in bytes.
 const uint8_t kFecHeaderSize = 10;
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
index 18967d77b8f5..f82e46d57238 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
@@ -67,5 +67,4 @@ void GeneratePacketMasks(int num_media_packets, int num_fec_packets,
 
 }  // namespace internal
 }  // namespace webrtc
-
-#endif
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index 3c87ebf91b3b..8c6cc5434cd8 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -343,27 +343,3 @@ TEST_F(RtpRtcpRtxNackTest, RtxNack) {
   EXPECT_EQ(kTestNumberOfRtxPackets, transport_.count_rtx_ssrc_);
   EXPECT_TRUE(ExpectedPacketsReceived());
 }
-
-TEST_F(RtpRtcpRtxNackTest, RTXAllNoLoss) {
-  RunRtxTest(kRtxAll, 0);
-  EXPECT_EQ(kTestSequenceNumber, *(receiver_.sequence_numbers_.begin()));
-  EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
-            *(receiver_.sequence_numbers_.rbegin()));
-  // We have transmitted all packets twice, and loss was set to 0.
-  EXPECT_EQ(kTestNumberOfPackets * 2u, receiver_.sequence_numbers_.size());
-  // Half of the packets should be via RTX.
-  EXPECT_EQ(static_cast<size_t>(kTestNumberOfPackets),
-            transport_.count_rtx_ssrc_);
-}
-
-TEST_F(RtpRtcpRtxNackTest, RTXAllWithLoss) {
-  int loss = 10;
-  RunRtxTest(kRtxAll, loss);
-  EXPECT_EQ(kTestSequenceNumber, *(receiver_.sequence_numbers_.begin()));
-  EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
-            *(receiver_.sequence_numbers_.rbegin()));
-  // Got everything but lost packets.
-  EXPECT_EQ(2u * (kTestNumberOfPackets - kTestNumberOfPackets / loss),
-            receiver_.sequence_numbers_.size());
-  EXPECT_EQ(static_cast<size_t>(kTestNumberOfPackets), transport_.count_rtx_ssrc_);
-}
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
index cf189903d13c..aa7c9c571568 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -24,14 +24,16 @@ const int kStatisticsProcessIntervalMs = 1000;
 
 StreamStatistician::~StreamStatistician() {}
 
-StreamStatisticianImpl::StreamStatisticianImpl(Clock* clock)
+StreamStatisticianImpl::StreamStatisticianImpl(
+    Clock* clock,
+    RtcpStatisticsCallback* rtcp_callback,
+    StreamDataCountersCallback* rtp_callback)
     : clock_(clock),
-      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
-      incoming_bitrate_(clock),
+      stream_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+      incoming_bitrate_(clock, NULL),
       ssrc_(0),
       max_reordering_threshold_(kDefaultMaxReorderingThreshold),
       jitter_q4_(0),
-      jitter_max_q4_(0),
       cumulative_loss_(0),
       jitter_q4_transmission_time_offset_(0),
       last_receive_time_ms_(0),
@@ -42,53 +44,53 @@ StreamStatisticianImpl::StreamStatisticianImpl(Clock* clock)
       received_seq_first_(0),
       received_seq_max_(0),
       received_seq_wraps_(0),
-      first_packet_(true),
       received_packet_overhead_(12),
-      received_byte_count_(0),
-      received_retransmitted_packets_(0),
-      received_inorder_packet_count_(0),
       last_report_inorder_packets_(0),
       last_report_old_packets_(0),
       last_report_seq_max_(0),
-      last_reported_statistics_() {}
+      rtcp_callback_(rtcp_callback),
+      rtp_callback_(rtp_callback) {}
 
 void StreamStatisticianImpl::ResetStatistics() {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   last_report_inorder_packets_ = 0;
   last_report_old_packets_ = 0;
   last_report_seq_max_ = 0;
-  memset(&last_reported_statistics_, 0, sizeof(last_reported_statistics_));
+  last_reported_statistics_ = RtcpStatistics();
   jitter_q4_ = 0;
-  jitter_max_q4_ = 0;
   cumulative_loss_ = 0;
   jitter_q4_transmission_time_offset_ = 0;
   received_seq_wraps_ = 0;
   received_seq_max_ = 0;
   received_seq_first_ = 0;
-  received_byte_count_ = 0;
-  received_retransmitted_packets_ = 0;
-  received_inorder_packet_count_ = 0;
-  first_packet_ = true;
+  receive_counters_ = StreamDataCounters();
 }
 
 void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
                                             size_t bytes,
                                             bool retransmitted) {
+  UpdateCounters(header, bytes, retransmitted);
+  NotifyRtpCallback();
+}
+
+void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
+                                            size_t bytes,
+                                            bool retransmitted) {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   bool in_order = InOrderPacketInternal(header.sequenceNumber);
   ssrc_ = header.ssrc;
   incoming_bitrate_.Update(bytes);
-  received_byte_count_ += bytes;
+  receive_counters_.bytes +=
+      bytes - (header.paddingLength + header.headerLength);
+  receive_counters_.header_bytes += header.headerLength;
+  receive_counters_.padding_bytes += header.paddingLength;
+  ++receive_counters_.packets;
+  if (!in_order && retransmitted) {
+    ++receive_counters_.retransmitted_packets;
+  }
 
-  if (first_packet_) {
-    first_packet_ = false;
-    // This is the first received report.
+ if (receive_counters_.packets == 1) { received_seq_first_ = header.sequenceNumber; - received_seq_max_ = header.sequenceNumber; - received_inorder_packet_count_ = 1; - clock_->CurrentNtp(last_receive_time_secs_, last_receive_time_frac_); - last_receive_time_ms_ = clock_->TimeInMilliseconds(); - return; } // Count only the new packets received. That is, if packets 1, 2, 3, 5, 4, 6 @@ -98,66 +100,27 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header, uint32_t receive_time_secs; uint32_t receive_time_frac; clock_->CurrentNtp(receive_time_secs, receive_time_frac); - received_inorder_packet_count_++; // Wrong if we use RetransmitOfOldPacket. - int32_t seq_diff = header.sequenceNumber - received_seq_max_; - if (seq_diff < 0) { + if (receive_counters_.packets > 1 && + received_seq_max_ > header.sequenceNumber) { // Wrap around detected. received_seq_wraps_++; } // New max. received_seq_max_ = header.sequenceNumber; + // If new time stamp and more than one in-order packet received, calculate + // new jitter statistics. if (header.timestamp != last_received_timestamp_ && - received_inorder_packet_count_ > 1) { - uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP( - receive_time_secs, receive_time_frac, header.payload_type_frequency); - uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP( - last_receive_time_secs_, last_receive_time_frac_, - header.payload_type_frequency); - int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) - - (header.timestamp - last_received_timestamp_); - - time_diff_samples = abs(time_diff_samples); - - // lib_jingle sometimes deliver crazy jumps in TS for the same stream. - // If this happens, don't update jitter value. Use 5 secs video frequency - // as the threshold. - if (time_diff_samples < 450000) { - // Note we calculate in Q4 to avoid using float. - int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_; - jitter_q4_ += ((jitter_diff_q4 + 8) >> 4); - } - - // Extended jitter report, RFC 5450. - // Actual network jitter, excluding the source-introduced jitter. 
- int32_t time_diff_samples_ext = - (receive_time_rtp - last_receive_time_rtp) - - ((header.timestamp + - header.extension.transmissionTimeOffset) - - (last_received_timestamp_ + - last_received_transmission_time_offset_)); - - time_diff_samples_ext = abs(time_diff_samples_ext); - - if (time_diff_samples_ext < 450000) { - int32_t jitter_diffQ4TransmissionTimeOffset = - (time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_; - jitter_q4_transmission_time_offset_ += - ((jitter_diffQ4TransmissionTimeOffset + 8) >> 4); - } + (receive_counters_.packets - receive_counters_.retransmitted_packets) > + 1) { + UpdateJitter(header, receive_time_secs, receive_time_frac); } last_received_timestamp_ = header.timestamp; last_receive_time_secs_ = receive_time_secs; last_receive_time_frac_ = receive_time_frac; last_receive_time_ms_ = clock_->TimeInMilliseconds(); - } else { - if (retransmitted) { - received_retransmitted_packets_++; - } else { - received_inorder_packet_count_++; - } } uint16_t packet_oh = header.headerLength + header.paddingLength; @@ -167,29 +130,113 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header, received_packet_overhead_ = (15 * received_packet_overhead_ + packet_oh) >> 4; } +void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header, + uint32_t receive_time_secs, + uint32_t receive_time_frac) { + uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP( + receive_time_secs, receive_time_frac, header.payload_type_frequency); + uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP( + last_receive_time_secs_, last_receive_time_frac_, + header.payload_type_frequency); + int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) - + (header.timestamp - last_received_timestamp_); + + time_diff_samples = abs(time_diff_samples); + + // lib_jingle sometimes deliver crazy jumps in TS for the same stream. + // If this happens, don't update jitter value. Use 5 secs video frequency + // as the threshold. + if (time_diff_samples < 450000) { + // Note we calculate in Q4 to avoid using float. + int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_; + jitter_q4_ += ((jitter_diff_q4 + 8) >> 4); + } + + // Extended jitter report, RFC 5450. + // Actual network jitter, excluding the source-introduced jitter. 
+ int32_t time_diff_samples_ext = + (receive_time_rtp - last_receive_time_rtp) - + ((header.timestamp + + header.extension.transmissionTimeOffset) - + (last_received_timestamp_ + + last_received_transmission_time_offset_)); + + time_diff_samples_ext = abs(time_diff_samples_ext); + + if (time_diff_samples_ext < 450000) { + int32_t jitter_diffQ4TransmissionTimeOffset = + (time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_; + jitter_q4_transmission_time_offset_ += + ((jitter_diffQ4TransmissionTimeOffset + 8) >> 4); + } +} + +void StreamStatisticianImpl::NotifyRtpCallback() { + StreamDataCounters data; + uint32_t ssrc; + { + CriticalSectionScoped cs(stream_lock_.get()); + data = receive_counters_; + ssrc = ssrc_; + } + rtp_callback_->DataCountersUpdated(data, ssrc); +} + +void StreamStatisticianImpl::NotifyRtcpCallback() { + RtcpStatistics data; + uint32_t ssrc; + { + CriticalSectionScoped cs(stream_lock_.get()); + data = last_reported_statistics_; + ssrc = ssrc_; + } + rtcp_callback_->StatisticsUpdated(data, ssrc); +} + +void StreamStatisticianImpl::FecPacketReceived() { + { + CriticalSectionScoped cs(stream_lock_.get()); + ++receive_counters_.fec_packets; + } + NotifyRtpCallback(); +} + void StreamStatisticianImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { - CriticalSectionScoped cs(crit_sect_.get()); + CriticalSectionScoped cs(stream_lock_.get()); max_reordering_threshold_ = max_reordering_threshold; } -bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) { - CriticalSectionScoped cs(crit_sect_.get()); - if (received_seq_first_ == 0 && received_byte_count_ == 0) { - // We have not received anything. - return false; - } - - if (!reset) { - if (last_report_inorder_packets_ == 0) { - // No report. +bool StreamStatisticianImpl::GetStatistics(RtcpStatistics* statistics, + bool reset) { + { + CriticalSectionScoped cs(stream_lock_.get()); + if (received_seq_first_ == 0 && receive_counters_.bytes == 0) { + // We have not received anything. return false; } - // Just get last report. - *statistics = last_reported_statistics_; - return true; + + if (!reset) { + if (last_report_inorder_packets_ == 0) { + // No report. + return false; + } + // Just get last report. + *statistics = last_reported_statistics_; + return true; + } + + *statistics = CalculateRtcpStatistics(); } + NotifyRtcpCallback(); + + return true; +} + +RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() { + RtcpStatistics stats; + if (last_report_inorder_packets_ == 0) { // First time we send a report. last_report_seq_max_ = received_seq_first_ - 1; @@ -206,7 +253,8 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) { // Number of received RTP packets since last report, counts all packets but // not re-transmissions. uint32_t rec_since_last = - received_inorder_packet_count_ - last_report_inorder_packets_; + (receive_counters_.packets - receive_counters_.retransmitted_packets) - + last_report_inorder_packets_; // With NACK we don't know the expected retransmissions during the last // second. We know how many "old" packets we have received. We just count @@ -218,7 +266,7 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) { // re-transmitted. We use RTT to decide if a packet is re-ordered or // re-transmitted. 
   uint32_t retransmitted_packets =
-      received_retransmitted_packets_ - last_report_old_packets_;
+      receive_counters_.retransmitted_packets - last_report_old_packets_;
   rec_since_last += retransmitted_packets;
 
   int32_t missing = 0;
@@ -231,64 +279,60 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
     local_fraction_lost = static_cast<uint8_t>(255 * missing / exp_since_last);
   }
-  statistics->fraction_lost = local_fraction_lost;
+  stats.fraction_lost = local_fraction_lost;
 
   // We need a counter for cumulative loss too.
   cumulative_loss_ += missing;
-
-  if (jitter_q4_ > jitter_max_q4_) {
-    jitter_max_q4_ = jitter_q4_;
-  }
-  statistics->cumulative_lost = cumulative_loss_;
-  statistics->extended_max_sequence_number = (received_seq_wraps_ << 16) +
-      received_seq_max_;
+  stats.cumulative_lost = cumulative_loss_;
+  stats.extended_max_sequence_number =
+      (received_seq_wraps_ << 16) + received_seq_max_;
   // Note: internal jitter value is in Q4 and needs to be scaled by 1/16.
-  statistics->jitter = jitter_q4_ >> 4;
-  statistics->max_jitter = jitter_max_q4_ >> 4;
-  if (reset) {
-    // Store this report.
-    last_reported_statistics_ = *statistics;
+  stats.jitter = jitter_q4_ >> 4;
 
-    // Only for report blocks in RTCP SR and RR.
-    last_report_inorder_packets_ = received_inorder_packet_count_;
-    last_report_old_packets_ = received_retransmitted_packets_;
-    last_report_seq_max_ = received_seq_max_;
-  }
-  return true;
+  // Store this report.
+  last_reported_statistics_ = stats;
+
+  // Only for report blocks in RTCP SR and RR.
+  last_report_inorder_packets_ =
+      receive_counters_.packets - receive_counters_.retransmitted_packets;
+  last_report_old_packets_ = receive_counters_.retransmitted_packets;
+  last_report_seq_max_ = received_seq_max_;
+
+  return stats;
 }
 
 void StreamStatisticianImpl::GetDataCounters(
     uint32_t* bytes_received, uint32_t* packets_received) const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   if (bytes_received) {
-    *bytes_received = received_byte_count_;
+    *bytes_received = receive_counters_.bytes + receive_counters_.header_bytes +
+                      receive_counters_.padding_bytes;
   }
   if (packets_received) {
-    *packets_received =
-        received_retransmitted_packets_ + received_inorder_packet_count_;
+    *packets_received = receive_counters_.packets;
   }
 }
 
 uint32_t StreamStatisticianImpl::BitrateReceived() const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   return incoming_bitrate_.BitrateNow();
 }
 
 void StreamStatisticianImpl::ProcessBitrate() {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   incoming_bitrate_.Process();
 }
 
 void StreamStatisticianImpl::LastReceiveTimeNtp(uint32_t* secs,
                                                 uint32_t* frac) const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   *secs = last_receive_time_secs_;
   *frac = last_receive_time_frac_;
 }
 
 bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
     const RTPHeader& header, int min_rtt) const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   if (InOrderPacketInternal(header.sequenceNumber)) {
     return false;
   }
@@ -323,7 +367,7 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
 }
 
 bool StreamStatisticianImpl::IsPacketInOrder(uint16_t sequence_number) const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(stream_lock_.get());
   return InOrderPacketInternal(sequence_number);
 }
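The fraction-lost value computed above is the RFC 3550 8-bit fixed-point fraction, scaled so that 255 means 100% loss. A worked example, consistent with the values the new RtcpCallbacks test in receive_statistics_unittest.cc below asserts:

// Worked example of the fraction_lost scaling:
//   exp_since_last = 5   packets expected since the last report
//   missing        = 1   of those never arrived
//   fraction_lost  = 255 * 1 / 5 = 51   (roughly 20% loss)
// This is exactly what the new unit test checks with
// EXPECT_EQ(51, statistics.fraction_lost).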
@@ -348,8 +392,10 @@ ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
 
 ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
     : clock_(clock),
-      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
-      last_rate_update_ms_(0) {}
+      receive_statistics_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+      last_rate_update_ms_(0),
+      rtcp_stats_callback_(NULL),
+      rtp_stats_callback_(NULL) {}
 
 ReceiveStatisticsImpl::~ReceiveStatisticsImpl() {
   while (!statisticians_.empty()) {
@@ -359,20 +405,31 @@ ReceiveStatisticsImpl::~ReceiveStatisticsImpl() {
 }
 
 void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
-                                           size_t bytes, bool old_packet) {
-  CriticalSectionScoped cs(crit_sect_.get());
-  StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
-  if (it == statisticians_.end()) {
-    std::pair<StatisticianImplMap::iterator, bool> insert_result =
-        statisticians_.insert(std::make_pair(
-            header.ssrc, new StreamStatisticianImpl(clock_)));
-    it = insert_result.first;
+                                           size_t bytes,
+                                           bool retransmitted) {
+  StatisticianImplMap::iterator it;
+  {
+    CriticalSectionScoped cs(receive_statistics_lock_.get());
+    it = statisticians_.find(header.ssrc);
+    if (it == statisticians_.end()) {
+      std::pair<StatisticianImplMap::iterator, bool> insert_result =
+          statisticians_.insert(std::make_pair(
+              header.ssrc, new StreamStatisticianImpl(clock_, this, this)));
+      it = insert_result.first;
+    }
   }
-  statisticians_[header.ssrc]->IncomingPacket(header, bytes, old_packet);
+  it->second->IncomingPacket(header, bytes, retransmitted);
+}
+
+void ReceiveStatisticsImpl::FecPacketReceived(uint32_t ssrc) {
+  CriticalSectionScoped cs(receive_statistics_lock_.get());
+  StatisticianImplMap::iterator it = statisticians_.find(ssrc);
+  assert(it != statisticians_.end());
+  it->second->FecPacketReceived();
 }
 
 void ReceiveStatisticsImpl::ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc) {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(receive_statistics_lock_.get());
   StatisticianImplMap::iterator from_it = statisticians_.find(from_ssrc);
   if (from_it == statisticians_.end())
     return;
@@ -383,7 +440,7 @@ void ReceiveStatisticsImpl::ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc) {
 }
 
 StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(receive_statistics_lock_.get());
   StatisticianMap active_statisticians;
   for (StatisticianImplMap::const_iterator it = statisticians_.begin();
       it != statisticians_.end(); ++it) {
@@ -400,7 +457,7 @@ StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
 
 StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
     uint32_t ssrc) const {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(receive_statistics_lock_.get());
   StatisticianImplMap::const_iterator it = statisticians_.find(ssrc);
   if (it == statisticians_.end())
     return NULL;
@@ -409,7 +466,7 @@ StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
 
 void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
     int max_reordering_threshold) {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(receive_statistics_lock_.get());
   for (StatisticianImplMap::iterator it = statisticians_.begin();
       it != statisticians_.end(); ++it) {
     it->second->SetMaxReorderingThreshold(max_reordering_threshold);
@@ -417,7 +474,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
 }
 
 int32_t ReceiveStatisticsImpl::Process() {
-  CriticalSectionScoped cs(crit_sect_.get());
+  CriticalSectionScoped cs(receive_statistics_lock_.get());
   for (StatisticianImplMap::iterator it =
statisticians_.begin(); it != statisticians_.end(); ++it) { it->second->ProcessBitrate(); @@ -427,17 +484,50 @@ int32_t ReceiveStatisticsImpl::Process() { } int32_t ReceiveStatisticsImpl::TimeUntilNextProcess() { - CriticalSectionScoped cs(crit_sect_.get()); + CriticalSectionScoped cs(receive_statistics_lock_.get()); int time_since_last_update = clock_->TimeInMilliseconds() - last_rate_update_ms_; return std::max(kStatisticsProcessIntervalMs - time_since_last_update, 0); } +void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) { + CriticalSectionScoped cs(receive_statistics_lock_.get()); + if (callback != NULL) + assert(rtcp_stats_callback_ == NULL); + rtcp_stats_callback_ = callback; +} + +void ReceiveStatisticsImpl::StatisticsUpdated(const RtcpStatistics& statistics, + uint32_t ssrc) { + CriticalSectionScoped cs(receive_statistics_lock_.get()); + if (rtcp_stats_callback_) { + rtcp_stats_callback_->StatisticsUpdated(statistics, ssrc); + } +} + +void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback( + StreamDataCountersCallback* callback) { + CriticalSectionScoped cs(receive_statistics_lock_.get()); + if (callback != NULL) + assert(rtp_stats_callback_ == NULL); + rtp_stats_callback_ = callback; +} + +void ReceiveStatisticsImpl::DataCountersUpdated(const StreamDataCounters& stats, + uint32_t ssrc) { + CriticalSectionScoped cs(receive_statistics_lock_.get()); + if (rtp_stats_callback_) { + rtp_stats_callback_->DataCountersUpdated(stats, ssrc); + } +} void NullReceiveStatistics::IncomingPacket(const RTPHeader& rtp_header, size_t bytes, bool retransmitted) {} +void NullReceiveStatistics::FecPacketReceived(uint32_t ssrc) {} + StatisticianMap NullReceiveStatistics::GetActiveStatisticians() const { return StatisticianMap(); } @@ -454,4 +544,10 @@ int32_t NullReceiveStatistics::TimeUntilNextProcess() { return 0; } int32_t NullReceiveStatistics::Process() { return 0; } +void NullReceiveStatistics::RegisterRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) {} + +void NullReceiveStatistics::RegisterRtpStatisticsCallback( + StreamDataCountersCallback* callback) {} + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h index 0af074c4a139..4aa41f349e5a 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -25,37 +25,48 @@ class CriticalSectionWrapper; class StreamStatisticianImpl : public StreamStatistician { public: - explicit StreamStatisticianImpl(Clock* clock); - + StreamStatisticianImpl(Clock* clock, + RtcpStatisticsCallback* rtcp_callback, + StreamDataCountersCallback* rtp_callback); virtual ~StreamStatisticianImpl() {} - virtual bool GetStatistics(Statistics* statistics, bool reset) OVERRIDE; + virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) OVERRIDE; virtual void GetDataCounters(uint32_t* bytes_received, uint32_t* packets_received) const OVERRIDE; virtual uint32_t BitrateReceived() const OVERRIDE; virtual void ResetStatistics() OVERRIDE; virtual bool IsRetransmitOfOldPacket(const RTPHeader& header, - int min_rtt) const OVERRIDE; + int min_rtt) const OVERRIDE; virtual bool IsPacketInOrder(uint16_t sequence_number) const OVERRIDE; - void IncomingPacket(const RTPHeader& rtp_header, size_t bytes, + void IncomingPacket(const RTPHeader& rtp_header, + size_t bytes, bool 
retransmitted);
+  void FecPacketReceived();
 
   void SetMaxReorderingThreshold(int max_reordering_threshold);
 
   void ProcessBitrate();
 
   virtual void LastReceiveTimeNtp(uint32_t* secs, uint32_t* frac) const;
 
 private:
   bool InOrderPacketInternal(uint16_t sequence_number) const;
+  RtcpStatistics CalculateRtcpStatistics();
+  void UpdateJitter(const RTPHeader& header,
+                    uint32_t receive_time_secs,
+                    uint32_t receive_time_frac);
+  void UpdateCounters(const RTPHeader& rtp_header,
+                      size_t bytes,
+                      bool retransmitted);
+  void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_.get());
+  void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_.get());
 
   Clock* clock_;
-  scoped_ptr<CriticalSectionWrapper> crit_sect_;
+  scoped_ptr<CriticalSectionWrapper> stream_lock_;
   Bitrate incoming_bitrate_;
   uint32_t ssrc_;
   int max_reordering_threshold_;  // In number of packets or sequence numbers.
 
   // Stats on received RTP packets.
   uint32_t jitter_q4_;
-  uint32_t jitter_max_q4_;
   uint32_t cumulative_loss_;
   uint32_t jitter_q4_transmission_time_offset_;
@@ -67,30 +78,34 @@ class StreamStatisticianImpl : public StreamStatistician {
   uint16_t received_seq_first_;
   uint16_t received_seq_max_;
   uint16_t received_seq_wraps_;
-  bool first_packet_;
 
   // Current counter values.
   uint16_t received_packet_overhead_;
-  uint32_t received_byte_count_;
-  uint32_t received_retransmitted_packets_;
-  uint32_t received_inorder_packet_count_;
+  StreamDataCounters receive_counters_;
 
   // Counter values when we sent the last report.
   uint32_t last_report_inorder_packets_;
   uint32_t last_report_old_packets_;
   uint16_t last_report_seq_max_;
-  Statistics last_reported_statistics_;
+  RtcpStatistics last_reported_statistics_;
+
+  RtcpStatisticsCallback* const rtcp_callback_;
+  StreamDataCountersCallback* const rtp_callback_;
 };
 
-class ReceiveStatisticsImpl : public ReceiveStatistics {
+class ReceiveStatisticsImpl : public ReceiveStatistics,
+                              public RtcpStatisticsCallback,
+                              public StreamDataCountersCallback {
  public:
  explicit ReceiveStatisticsImpl(Clock* clock);
 
  ~ReceiveStatisticsImpl();
 
  // Implement ReceiveStatistics.
- virtual void IncomingPacket(const RTPHeader& header, size_t bytes, - bool old_packet) OVERRIDE; + virtual void IncomingPacket(const RTPHeader& header, + size_t bytes, + bool retransmitted) OVERRIDE; + virtual void FecPacketReceived(uint32_t ssrc) OVERRIDE; virtual StatisticianMap GetActiveStatisticians() const OVERRIDE; virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE; virtual void SetMaxReorderingThreshold(int max_reordering_threshold) OVERRIDE; @@ -101,13 +116,27 @@ class ReceiveStatisticsImpl : public ReceiveStatistics { void ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc); + virtual void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback) + OVERRIDE; + + virtual void RegisterRtpStatisticsCallback( + StreamDataCountersCallback* callback) OVERRIDE; + private: + virtual void StatisticsUpdated(const RtcpStatistics& statistics, + uint32_t ssrc) OVERRIDE; + virtual void DataCountersUpdated(const StreamDataCounters& counters, + uint32_t ssrc) OVERRIDE; + typedef std::map<uint32_t, StreamStatisticianImpl*> StatisticianImplMap; Clock* clock_; - scoped_ptr<CriticalSectionWrapper> crit_sect_; + scoped_ptr<CriticalSectionWrapper> receive_statistics_lock_; int64_t last_rate_update_ms_; StatisticianImplMap statisticians_; + + RtcpStatisticsCallback* rtcp_stats_callback_; + StreamDataCountersCallback* rtp_stats_callback_; }; } // namespace webrtc #endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_ diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc index 39c5c6d748aa..f0b9dedde576 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc @@ -20,7 +20,6 @@ const int kPacketSize1 = 100; const int kPacketSize2 = 300; const uint32_t kSsrc1 = 1; const uint32_t kSsrc2 = 2; -const uint32_t kSsrc3 = 3; class ReceiveStatisticsTest : public ::testing::Test { public: @@ -29,10 +28,10 @@ class ReceiveStatisticsTest : public ::testing::Test { receive_statistics_(ReceiveStatistics::Create(&clock_)) { memset(&header1_, 0, sizeof(header1_)); header1_.ssrc = kSsrc1; - header1_.sequenceNumber = 0; + header1_.sequenceNumber = 100; memset(&header2_, 0, sizeof(header2_)); header2_.ssrc = kSsrc2; - header2_.sequenceNumber = 0; + header2_.sequenceNumber = 100; } protected: @@ -132,4 +131,173 @@ TEST_F(ReceiveStatisticsTest, ActiveStatisticians) { EXPECT_EQ(200u, bytes_received); EXPECT_EQ(2u, packets_received); } + +TEST_F(ReceiveStatisticsTest, RtcpCallbacks) { + class TestCallback : public RtcpStatisticsCallback { + public: + TestCallback() + : RtcpStatisticsCallback(), num_calls_(0), ssrc_(0), stats_() {} + virtual ~TestCallback() {} + + virtual void StatisticsUpdated(const RtcpStatistics& statistics, + uint32_t ssrc) { + ssrc_ = ssrc; + stats_ = statistics; + ++num_calls_; + } + + uint32_t num_calls_; + uint32_t ssrc_; + RtcpStatistics stats_; + } callback; + + receive_statistics_->RegisterRtcpStatisticsCallback(&callback); + + // Add some arbitrary data, with loss and jitter.
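+ // (With sequence numbers 1, 3, 2 (retransmit) and 5, four packets arrive + // where five are expected, so the report below carries cumulative loss 1 + // and fraction lost 1 * 256 / 5 = 51.)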
+ header1_.sequenceNumber = 1; + clock_.AdvanceTimeMilliseconds(7); + header1_.timestamp += 3; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, false); + header1_.sequenceNumber += 2; + clock_.AdvanceTimeMilliseconds(9); + header1_.timestamp += 9; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, false); + --header1_.sequenceNumber; + clock_.AdvanceTimeMilliseconds(13); + header1_.timestamp += 47; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, true); + header1_.sequenceNumber += 3; + clock_.AdvanceTimeMilliseconds(11); + header1_.timestamp += 17; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, false); + ++header1_.sequenceNumber; + + EXPECT_EQ(0u, callback.num_calls_); + + // Call GetStatistics, simulating a timed rtcp sender thread. + RtcpStatistics statistics; + receive_statistics_->GetStatistician(kSsrc1) + ->GetStatistics(&statistics, true); + + EXPECT_EQ(1u, callback.num_calls_); + EXPECT_EQ(callback.ssrc_, kSsrc1); + EXPECT_EQ(statistics.cumulative_lost, callback.stats_.cumulative_lost); + EXPECT_EQ(statistics.extended_max_sequence_number, + callback.stats_.extended_max_sequence_number); + EXPECT_EQ(statistics.fraction_lost, callback.stats_.fraction_lost); + EXPECT_EQ(statistics.jitter, callback.stats_.jitter); + EXPECT_EQ(51, statistics.fraction_lost); + EXPECT_EQ(1u, statistics.cumulative_lost); + EXPECT_EQ(5u, statistics.extended_max_sequence_number); + EXPECT_EQ(4u, statistics.jitter); + + receive_statistics_->RegisterRtcpStatisticsCallback(NULL); + + // Add some more data. + header1_.sequenceNumber = 1; + clock_.AdvanceTimeMilliseconds(7); + header1_.timestamp += 3; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, false); + header1_.sequenceNumber += 2; + clock_.AdvanceTimeMilliseconds(9); + header1_.timestamp += 9; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, false); + --header1_.sequenceNumber; + clock_.AdvanceTimeMilliseconds(13); + header1_.timestamp += 47; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, true); + header1_.sequenceNumber += 3; + clock_.AdvanceTimeMilliseconds(11); + header1_.timestamp += 17; + receive_statistics_->IncomingPacket(header1_, kPacketSize1, false); + ++header1_.sequenceNumber; + + receive_statistics_->GetStatistician(kSsrc1) + ->GetStatistics(&statistics, true); + + // Should not have been called after deregister. + EXPECT_EQ(1u, callback.num_calls_); +} + +TEST_F(ReceiveStatisticsTest, RtpCallbacks) { + class TestCallback : public StreamDataCountersCallback { + public: + TestCallback() + : StreamDataCountersCallback(), num_calls_(0), ssrc_(0), stats_() {} + virtual ~TestCallback() {} + + virtual void DataCountersUpdated(const StreamDataCounters& counters, + uint32_t ssrc) { + ssrc_ = ssrc; + stats_ = counters; + ++num_calls_; + } + + void ExpectMatches(uint32_t num_calls, + uint32_t ssrc, + uint32_t bytes, + uint32_t padding, + uint32_t packets, + uint32_t retransmits, + uint32_t fec) { + EXPECT_EQ(num_calls, num_calls_); + EXPECT_EQ(ssrc, ssrc_); + EXPECT_EQ(bytes, stats_.bytes); + EXPECT_EQ(padding, stats_.padding_bytes); + EXPECT_EQ(packets, stats_.packets); + EXPECT_EQ(retransmits, stats_.retransmitted_packets); + EXPECT_EQ(fec, stats_.fec_packets); + } + + uint32_t num_calls_; + uint32_t ssrc_; + StreamDataCounters stats_; + } callback; + + receive_statistics_->RegisterRtpStatisticsCallback(&callback); + + const uint32_t kHeaderLength = 20; + const uint32_t kPaddingLength = 9; + + // One packet of size kPacketSize1. 
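+ // (Note that StreamDataCounters::bytes counts payload only: the header + // length is excluded from the incoming packet size and padding is + // reported separately, hence the expected values below.)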
+ header1_.headerLength = kHeaderLength; + receive_statistics_->IncomingPacket( + header1_, kPacketSize1 + kHeaderLength, false); + callback.ExpectMatches(1, kSsrc1, kPacketSize1, 0, 1, 0, 0); + + ++header1_.sequenceNumber; + clock_.AdvanceTimeMilliseconds(5); + header1_.paddingLength = 9; + // Another packet of size kPacketSize1 with 9 bytes padding. + receive_statistics_->IncomingPacket( + header1_, kPacketSize1 + kHeaderLength + kPaddingLength, false); + callback.ExpectMatches(2, kSsrc1, 2 * kPacketSize1, kPaddingLength, 2, 0, 0); + + clock_.AdvanceTimeMilliseconds(5); + // Retransmit last packet. + receive_statistics_->IncomingPacket( + header1_, kPacketSize1 + kHeaderLength + kPaddingLength, true); + callback.ExpectMatches( + 3, kSsrc1, 3 * kPacketSize1, kPaddingLength * 2, 3, 1, 0); + + header1_.paddingLength = 0; + ++header1_.sequenceNumber; + clock_.AdvanceTimeMilliseconds(5); + // One recovered packet. + receive_statistics_->IncomingPacket( + header1_, kPacketSize1 + kHeaderLength, false); + receive_statistics_->FecPacketReceived(kSsrc1); + callback.ExpectMatches( + 5, kSsrc1, 4 * kPacketSize1, kPaddingLength * 2, 4, 1, 1); + + receive_statistics_->RegisterRtpStatisticsCallback(NULL); + + // New stats, but callback should not be called. + ++header1_.sequenceNumber; + clock_.AdvanceTimeMilliseconds(5); + receive_statistics_->IncomingPacket( + header1_, kPacketSize1 + kHeaderLength, true); + callback.ExpectMatches( + 5, kSsrc1, 4 * kPacketSize1, kPaddingLength * 2, 4, 1, 1); +} } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc index cbd899394f52..68f61371395f 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc @@ -58,6 +58,8 @@ class TestTransport : public Transport { class RtcpFormatRembTest : public ::testing::Test { protected: + static const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000; + RtcpFormatRembTest() : over_use_detector_options_(), system_clock_(Clock::GetRealTimeClock()), @@ -66,7 +68,8 @@ class RtcpFormatRembTest : public ::testing::Test { remote_bitrate_estimator_( RemoteBitrateEstimatorFactory().Create( &remote_bitrate_observer_, - system_clock_)) {} + system_clock_, + kRemoteBitrateEstimatorMinBitrateBps)) {} virtual void SetUp(); virtual void TearDown(); diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc index 5b459197f3df..d37f63e07d3b 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -13,17 +13,14 @@ #include <assert.h> // assert #include <string.h> // memset +#include <algorithm> + #include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h" #include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/system_wrappers/interface/trace_event.h" -namespace -{ - const float FRAC = 4.294967296E9; -} - namespace webrtc { using namespace RTCPUtility; using namespace RTCPHelp; @@ -51,11 +48,14 @@ RTCPReceiver::RTCPReceiver(const int32_t id, Clock* clock, _remoteSenderInfo(), _lastReceivedSRNTPsecs(0), _lastReceivedSRNTPfrac(0), + _lastReceivedXRNTPsecs(0), + _lastReceivedXRNTPfrac(0), +
xr_rr_rtt_ms_(0), _receivedInfoMap(), _packetTimeOutMS(0), _lastReceivedRrMs(0), _lastIncreasedSequenceNumberMs(0), - _rtt(0) { + stats_callback_(NULL) { memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo)); WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__); } @@ -217,21 +217,15 @@ int32_t RTCPReceiver::RTT(uint32_t remoteSSRC, return 0; } -uint16_t RTCPReceiver::RTT() const { +bool RTCPReceiver::GetAndResetXrRrRtt(uint16_t* rtt_ms) { + assert(rtt_ms); CriticalSectionScoped lock(_criticalSectionRTCPReceiver); - if (!_receivedReportBlockMap.empty()) { - return 0; + if (xr_rr_rtt_ms_ == 0) { + return false; } - return _rtt; -} - -int RTCPReceiver::SetRTT(uint16_t rtt) { - CriticalSectionScoped lock(_criticalSectionRTCPReceiver); - if (!_receivedReportBlockMap.empty()) { - return -1; - } - _rtt = rtt; - return 0; + *rtt_ms = xr_rr_rtt_ms_; + xr_rr_rtt_ms_ = 0; + return true; } int32_t RTCPReceiver::GetReportBlockInfo(uint32_t remoteSSRC, @@ -285,6 +279,30 @@ RTCPReceiver::NTP(uint32_t *ReceivedNTPsecs, return 0; } +bool RTCPReceiver::LastReceivedXrReferenceTimeInfo( + RtcpReceiveTimeInfo* info) const { + assert(info); + CriticalSectionScoped lock(_criticalSectionRTCPReceiver); + if (_lastReceivedXRNTPsecs == 0 && _lastReceivedXRNTPfrac == 0) { + return false; + } + + info->sourceSSRC = _remoteXRReceiveTimeInfo.sourceSSRC; + info->lastRR = _remoteXRReceiveTimeInfo.lastRR; + + // Get the delay since last received report (RFC 3611). + uint32_t receive_time = RTCPUtility::MidNtp(_lastReceivedXRNTPsecs, + _lastReceivedXRNTPfrac); + + uint32_t ntp_sec = 0; + uint32_t ntp_frac = 0; + _clock->CurrentNtp(ntp_sec, ntp_frac); + uint32_t now = RTCPUtility::MidNtp(ntp_sec, ntp_frac); + + info->delaySinceLastRR = now - receive_time; + return true; +} + int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const { @@ -342,6 +360,15 @@ RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation, case RTCPUtility::kRtcpSdesCode: HandleSDES(*rtcpParser); break; + case RTCPUtility::kRtcpXrHeaderCode: + HandleXrHeader(*rtcpParser, rtcpPacketInformation); + break; + case RTCPUtility::kRtcpXrReceiverReferenceTimeCode: + HandleXrReceiveReferenceTime(*rtcpParser, rtcpPacketInformation); + break; + case RTCPUtility::kRtcpXrDlrrReportBlockCode: + HandleXrDlrrReportBlock(*rtcpParser, rtcpPacketInformation); + break; case RTCPUtility::kRtcpXrVoipMetricCode: HandleXRVOIPMetric(*rtcpParser, rtcpPacketInformation); break; @@ -474,11 +501,12 @@ RTCPReceiver::HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser, } // no need for critsect we have _criticalSectionRTCPReceiver -void -RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket, - RTCPPacketInformation& rtcpPacketInformation, - const uint32_t remoteSSRC, - const uint8_t numberOfReportBlocks) { +void RTCPReceiver::HandleReportBlock( + const RTCPUtility::RTCPPacket& rtcpPacket, + RTCPPacketInformation& rtcpPacketInformation, + const uint32_t remoteSSRC, + const uint8_t numberOfReportBlocks) + EXCLUSIVE_LOCKS_REQUIRED(_criticalSectionRTCPReceiver) { // This will be called once per report block in the RTCP packet. // We filter out all report blocks that are not for us. // Each packet has max 31 RR blocks. 
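// (The reception report count (RC) field of the RTCP header is 5 bits wide, hence the limit of 31.)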
@@ -896,9 +924,90 @@ void RTCPReceiver::HandleBYE(RTCPUtility::RTCPParserV2& rtcpParser) { delete cnameInfoIt->second; _receivedCnameMap.erase(cnameInfoIt); } + xr_rr_rtt_ms_ = 0; rtcpParser.Iterate(); } +void RTCPReceiver::HandleXrHeader( + RTCPUtility::RTCPParserV2& parser, + RTCPPacketInformation& rtcpPacketInformation) { + const RTCPUtility::RTCPPacket& packet = parser.Packet(); + + rtcpPacketInformation.xr_originator_ssrc = packet.XR.OriginatorSSRC; + + parser.Iterate(); +} + +void RTCPReceiver::HandleXrReceiveReferenceTime( + RTCPUtility::RTCPParserV2& parser, + RTCPPacketInformation& rtcpPacketInformation) { + const RTCPUtility::RTCPPacket& packet = parser.Packet(); + + _remoteXRReceiveTimeInfo.sourceSSRC = + rtcpPacketInformation.xr_originator_ssrc; + + _remoteXRReceiveTimeInfo.lastRR = RTCPUtility::MidNtp( + packet.XRReceiverReferenceTimeItem.NTPMostSignificant, + packet.XRReceiverReferenceTimeItem.NTPLeastSignificant); + + _clock->CurrentNtp(_lastReceivedXRNTPsecs, _lastReceivedXRNTPfrac); + + rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpXrReceiverReferenceTime; + + parser.Iterate(); +} + +void RTCPReceiver::HandleXrDlrrReportBlock( + RTCPUtility::RTCPParserV2& parser, + RTCPPacketInformation& rtcpPacketInformation) { + const RTCPUtility::RTCPPacket& packet = parser.Packet(); + // Iterate through sub-block(s), if any. + RTCPUtility::RTCPPacketTypes packet_type = parser.Iterate(); + + while (packet_type == RTCPUtility::kRtcpXrDlrrReportBlockItemCode) { + HandleXrDlrrReportBlockItem(packet, rtcpPacketInformation); + packet_type = parser.Iterate(); + } +} + +void RTCPReceiver::HandleXrDlrrReportBlockItem( + const RTCPUtility::RTCPPacket& packet, + RTCPPacketInformation& rtcpPacketInformation) + EXCLUSIVE_LOCKS_REQUIRED(_criticalSectionRTCPReceiver) { + if (registered_ssrcs_.find(packet.XRDLRRReportBlockItem.SSRC) == + registered_ssrcs_.end()) { + // Not to us. + return; + } + + rtcpPacketInformation.xr_dlrr_item = true; + + // To avoid problem with acquiring _criticalSectionRTCPSender while holding + // _criticalSectionRTCPReceiver. + _criticalSectionRTCPReceiver->Leave(); + + int64_t send_time_ms; + bool found = _rtpRtcp.SendTimeOfXrRrReport( + packet.XRDLRRReportBlockItem.LastRR, &send_time_ms); + + _criticalSectionRTCPReceiver->Enter(); + + if (!found) { + return; + } + + // The DelayLastRR field is in units of 1/65536 sec. 
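+ // For example, DelayLastRR = 0x00018000 is 1.5 s: the high 16 bits give + // 1 * 1000 ms and the low 16 bits give (0x8000 * 1000) >> 16 = 500 ms, + // so delay_rr_ms below becomes 1500.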
+ uint32_t delay_rr_ms = + (((packet.XRDLRRReportBlockItem.DelayLastRR & 0x0000ffff) * 1000) >> 16) + + (((packet.XRDLRRReportBlockItem.DelayLastRR & 0xffff0000) >> 16) * 1000); + + int32_t rtt = _clock->CurrentNtpInMilliseconds() - delay_rr_ms - send_time_ms; + + xr_rr_rtt_ms_ = static_cast<uint16_t>(std::max(rtt, 1)); + + rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpXrDlrrReportBlock; +} + // no need for critsect we have _criticalSectionRTCPReceiver void RTCPReceiver::HandleXRVOIPMetric(RTCPUtility::RTCPParserV2& rtcpParser, @@ -1284,6 +1393,19 @@ int32_t RTCPReceiver::UpdateTMMBR() { return 0; } +void RTCPReceiver::RegisterRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) { + CriticalSectionScoped cs(_criticalSectionFeedbacks); + if (callback != NULL) + assert(stats_callback_ == NULL); + stats_callback_ = callback; +} + +RtcpStatisticsCallback* RTCPReceiver::GetRtcpStatisticsCallback() { + CriticalSectionScoped cs(_criticalSectionFeedbacks); + return stats_callback_; +} + // Holding no Critical section void RTCPReceiver::TriggerCallbacksFromRTCPPacket( RTCPPacketInformation& rtcpPacketInformation) { @@ -1378,6 +1500,24 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket( } } } + + { + CriticalSectionScoped cs(_criticalSectionFeedbacks); + if (stats_callback_) { + for (ReportBlockList::const_iterator it = + rtcpPacketInformation.report_blocks.begin(); + it != rtcpPacketInformation.report_blocks.end(); + ++it) { + RtcpStatistics stats; + stats.cumulative_lost = it->cumulativeLost; + stats.extended_max_sequence_number = it->extendedHighSeqNum; + stats.fraction_lost = it->fractionLost; + stats.jitter = it->jitter; + + stats_callback_->StatisticsUpdated(stats, local_ssrc); + } + } + } } int32_t RTCPReceiver::CNAME(const uint32_t remoteSSRC, diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h index ca17282066a7..b940d3e10f13 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h @@ -69,6 +69,8 @@ public: uint32_t *RTCPArrivalTimeFrac, uint32_t *rtcp_timestamp) const; + bool LastReceivedXrReferenceTimeInfo(RtcpReceiveTimeInfo* info) const; + // get rtt int32_t RTT(uint32_t remoteSSRC, uint16_t* RTT, @@ -76,10 +78,6 @@ public: uint16_t* minRTT, uint16_t* maxRTT) const; - uint16_t RTT() const; - - int SetRTT(uint16_t rtt); - int32_t ResetRTT(const uint32_t remoteSSRC); int32_t GetReportBlockInfo(uint32_t remoteSSRC, @@ -90,6 +88,8 @@ public: int32_t SenderInfoReceived(RTCPSenderInfo* senderInfo) const; + bool GetAndResetXrRrRtt(uint16_t* rtt_ms); + // get statistics int32_t StatisticsReceived( std::vector<RTCPReportBlock>* receiveBlocks) const; @@ -115,6 +115,9 @@ public: int32_t UpdateTMMBR(); + void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback); + RtcpStatisticsCallback* GetRtcpStatisticsCallback(); + protected: RTCPHelp::RTCPReportBlockInformation* CreateReportBlockInformation(const uint32_t remoteSSRC); RTCPHelp::RTCPReportBlockInformation* GetReportBlockInformation(const uint32_t remoteSSRC) const; @@ -139,6 +142,21 @@ protected: void HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser); + void HandleXrHeader(RTCPUtility::RTCPParserV2& parser, + RTCPHelp::RTCPPacketInformation& rtcpPacketInformation); + + void HandleXrReceiveReferenceTime( + RTCPUtility::RTCPParserV2& parser, + RTCPHelp::RTCPPacketInformation& rtcpPacketInformation); + + void HandleXrDlrrReportBlock( +
RTCPUtility::RTCPParserV2& parser, + RTCPHelp::RTCPPacketInformation& rtcpPacketInformation); + + void HandleXrDlrrReportBlockItem( + const RTCPUtility::RTCPPacket& packet, + RTCPHelp::RTCPPacketInformation& rtcpPacketInformation); + void HandleXRVOIPMetric(RTCPUtility::RTCPParserV2& rtcpParser, RTCPHelp::RTCPPacketInformation& rtcpPacketInformation); @@ -229,6 +247,14 @@ protected: uint32_t _lastReceivedSRNTPsecs; uint32_t _lastReceivedSRNTPfrac; + // Received XR receive time report. + RtcpReceiveTimeInfo _remoteXRReceiveTimeInfo; + // Time when the report was received. + uint32_t _lastReceivedXRNTPsecs; + uint32_t _lastReceivedXRNTPfrac; + // Estimated rtt, zero when there is no valid estimate. + uint16_t xr_rr_rtt_ms_; + // Received report blocks. std::map<uint32_t, RTCPHelp::RTCPReportBlockInformation*> _receivedReportBlockMap; @@ -245,10 +271,7 @@ protected: // delivered RTP packet to the remote side. int64_t _lastIncreasedSequenceNumberMs; - // Externally set RTT. This value can only be used if there are no valid - // RTT estimates. - uint16_t _rtt; - + RtcpStatisticsCallback* stats_callback_; }; } // namespace webrtc #endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_ diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc index eab7232e526d..08c7b4bbe8b0 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc @@ -34,6 +34,8 @@ RTCPPacketInformation::RTCPPacketInformation() ntp_secs(0), ntp_frac(0), rtp_timestamp(0), + xr_originator_ssrc(0), + xr_dlrr_item(false), VoIPMetric(NULL) { } diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h index 2d53bb736dab..f1aff59184d2 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h @@ -84,6 +84,8 @@ public: uint32_t ntp_frac; uint32_t rtp_timestamp; + uint32_t xr_originator_ssrc; + bool xr_dlrr_item; RTCPVoIPMetric* VoIPMetric; private: diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc index b63f5ebb5945..b3f15bb96625 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc @@ -35,16 +35,18 @@ class PacketBuilder { struct ReportBlock { ReportBlock(uint32_t ssrc, uint32_t extended_max, uint8_t fraction_loss, - uint32_t cumulative_loss) + uint32_t cumulative_loss, uint32_t jitter) : ssrc(ssrc), extended_max(extended_max), fraction_loss(fraction_loss), - cumulative_loss(cumulative_loss) {} + cumulative_loss(cumulative_loss), + jitter(jitter) {} uint32_t ssrc; uint32_t extended_max; uint8_t fraction_loss; uint32_t cumulative_loss; + uint32_t jitter; }; PacketBuilder() @@ -108,9 +110,9 @@ class PacketBuilder { void AddRrPacket(uint32_t sender_ssrc, uint32_t rtp_ssrc, uint32_t extended_max, uint8_t fraction_loss, - uint32_t cumulative_loss) { + uint32_t cumulative_loss, uint32_t jitter) { ReportBlock report_block(rtp_ssrc, extended_max, fraction_loss, - cumulative_loss); + cumulative_loss, jitter); std::list<ReportBlock> report_block_vector(&report_block, &report_block + 1); AddRrPacketMultipleReportBlocks(sender_ssrc, report_block_vector); @@ -123,20 +125,70 @@
class PacketBuilder { for (std::list<ReportBlock>::const_iterator it = report_blocks.begin(); it != report_blocks.end(); ++it) { AddReportBlock(it->ssrc, it->extended_max, it->fraction_loss, - it->cumulative_loss); + it->cumulative_loss, it->jitter); } } void AddReportBlock(uint32_t rtp_ssrc, uint32_t extended_max, - uint8_t fraction_loss, uint32_t cumulative_loss) { + uint8_t fraction_loss, uint32_t cumulative_loss, + uint32_t jitter) { Add32(rtp_ssrc); Add32((fraction_loss << 24) + cumulative_loss); Add32(extended_max); - Add32(0); // Jitter. + Add32(jitter); Add32(0); // Last SR. Add32(0); // Delay since last SR. } + void AddXrHeader(uint32_t sender_ssrc) { + AddRtcpHeader(207, 0); + Add32(sender_ssrc); + } + + void AddXrReceiverReferenceTimeBlock(uint32_t ntp_sec, uint32_t ntp_frac) { + Add8(4); // Block type. + Add8(0); // Reserved. + Add16(2); // Length. + Add64(ntp_sec, ntp_frac); // NTP timestamp. + } + + void AddXrDlrrBlock(std::vector<uint32_t>& remote_ssrc) { + ASSERT_LT(pos_ + 4 + static_cast<int>(remote_ssrc.size())*4, + kMaxPacketSize-1) << "Max buffer size reached."; + Add8(5); // Block type. + Add8(0); // Reserved. + Add16(remote_ssrc.size() * 3); // Length. + for (size_t i = 0; i < remote_ssrc.size(); ++i) { + Add32(remote_ssrc.at(i)); // Receiver SSRC. + Add32(0x10203); // Last RR. + Add32(0x40506); // Delay since last RR. + } + } + + void AddXrUnknownBlock() { + Add8(6); // Block type. + Add8(0); // Reserved. + Add16(9); // Length. + Add32(0); // Receiver SSRC. + Add64(0, 0); // Remaining fields (RFC 3611) are set to zero. + Add64(0, 0); + Add64(0, 0); + Add64(0, 0); + } + + void AddXrVoipBlock(uint32_t remote_ssrc, uint8_t loss) { + Add8(7); // Block type. + Add8(0); // Reserved. + Add16(8); // Length. + Add32(remote_ssrc); // Receiver SSRC. + Add8(loss); // Loss rate. + Add8(0); // Remaining statistics (RFC 3611) are set to zero. + Add16(0); + Add64(0, 0); + Add64(0, 0); + Add64(0, 0); + } + const uint8_t* packet() { PatchLengthField(); return buffer_; @@ -199,6 +251,8 @@ class TestTransport : public Transport, class RtcpReceiverTest : public ::testing::Test { protected: + static const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000; + RtcpReceiverTest() : over_use_detector_options_(), system_clock_(1335900000), @@ -206,7 +260,8 @@ class RtcpReceiverTest : public ::testing::Test { remote_bitrate_estimator_( RemoteBitrateEstimatorFactory().Create( &remote_bitrate_observer_, - &system_clock_)) { + &system_clock_, + kRemoteBitrateEstimatorMinBitrateBps)) { test_transport_ = new TestTransport(); RtpRtcp::Configuration configuration; @@ -228,14 +283,15 @@ class RtcpReceiverTest : public ::testing::Test { // Injects an RTCP packet into the receiver. // Returns 0 for OK, non-0 for failure. int InjectRtcpPacket(const uint8_t* packet, - uint16_t packet_len) { + uint16_t packet_len) { RTCPUtility::RTCPParserV2 rtcpParser(packet, packet_len, true); // Allow non-compound RTCP RTCPHelp::RTCPPacketInformation rtcpPacketInformation; - int result = rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation, - &rtcpParser); + EXPECT_EQ(0, rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation, + &rtcpParser)); + rtcp_receiver_->TriggerCallbacksFromRTCPPacket(rtcpPacketInformation); // The NACK list is on purpose not copied below as it isn't needed by the // test.
rtcp_packet_info_.rtcpPacketTypeFlags = @@ -255,7 +311,11 @@ class RtcpReceiverTest : public ::testing::Test { rtcp_packet_info_.ntp_secs = rtcpPacketInformation.ntp_secs; rtcp_packet_info_.ntp_frac = rtcpPacketInformation.ntp_frac; rtcp_packet_info_.rtp_timestamp = rtcpPacketInformation.rtp_timestamp; - return result; + rtcp_packet_info_.xr_dlrr_item = rtcpPacketInformation.xr_dlrr_item; + if (rtcpPacketInformation.VoIPMetric) { + rtcp_packet_info_.AddVoIPMetric(rtcpPacketInformation.VoIPMetric); + } + return 0; } OverUseDetectorOptions over_use_detector_options_; @@ -287,6 +347,191 @@ TEST_F(RtcpReceiverTest, InjectSrPacket) { kRtcpSr & rtcp_packet_info_.rtcpPacketTypeFlags); } +TEST_F(RtcpReceiverTest, XrPacketWithZeroReportBlocksIgnored) { + PacketBuilder p; + p.AddXrHeader(0x2345); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(0U, rtcp_packet_info_.rtcpPacketTypeFlags); +} + +TEST_F(RtcpReceiverTest, InjectXrVoipPacket) { + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + + const uint8_t kLossRate = 123; + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrVoipBlock(kSourceSsrc, kLossRate); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + ASSERT_TRUE(rtcp_packet_info_.VoIPMetric != NULL); + EXPECT_EQ(kLossRate, rtcp_packet_info_.VoIPMetric->lossRate); + EXPECT_EQ(kRtcpXrVoipMetric, rtcp_packet_info_.rtcpPacketTypeFlags); +} + +TEST_F(RtcpReceiverTest, InjectXrReceiverReferenceTimePacket) { + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrReceiverReferenceTimeBlock(0x10203, 0x40506); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(kRtcpXrReceiverReferenceTime, + rtcp_packet_info_.rtcpPacketTypeFlags); +} + +TEST_F(RtcpReceiverTest, InjectXrDlrrPacketWithNoSubBlock) { + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + std::vector<uint32_t> remote_ssrcs; + + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrDlrrBlock(remote_ssrcs); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(0U, rtcp_packet_info_.rtcpPacketTypeFlags); + EXPECT_FALSE(rtcp_packet_info_.xr_dlrr_item); +} + +TEST_F(RtcpReceiverTest, XrDlrrPacketNotToUsIgnored) { + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + std::vector<uint32_t> remote_ssrcs; + remote_ssrcs.push_back(kSourceSsrc+1); + + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrDlrrBlock(remote_ssrcs); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(0U, rtcp_packet_info_.rtcpPacketTypeFlags); + EXPECT_FALSE(rtcp_packet_info_.xr_dlrr_item); +} + +TEST_F(RtcpReceiverTest, InjectXrDlrrPacketWithSubBlock) { + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + std::vector<uint32_t> remote_ssrcs; + remote_ssrcs.push_back(kSourceSsrc); + + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrDlrrBlock(remote_ssrcs); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + // The parser should note the DLRR report block item, but not flag the packet + // since the RTT is not estimated.
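+ // (No XR RR has been sent, so SendTimeOfXrRrReport() finds no send time + // matching the sub-block's LastRR value and HandleXrDlrrReportBlockItem() + // returns before computing an RTT.)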
+ EXPECT_TRUE(rtcp_packet_info_.xr_dlrr_item); +} + +TEST_F(RtcpReceiverTest, InjectXrDlrrPacketWithMultipleSubBlocks) { + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + std::vector<uint32_t> remote_ssrcs; + remote_ssrcs.push_back(kSourceSsrc+2); + remote_ssrcs.push_back(kSourceSsrc+1); + remote_ssrcs.push_back(kSourceSsrc); + + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrDlrrBlock(remote_ssrcs); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + // The parser should note the DLRR report block item, but not flag the packet + // since the RTT is not estimated. + EXPECT_TRUE(rtcp_packet_info_.xr_dlrr_item); +} + +TEST_F(RtcpReceiverTest, InjectXrPacketWithMultipleReportBlocks) { + const uint8_t kLossRate = 123; + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + std::vector<uint32_t> remote_ssrcs; + remote_ssrcs.push_back(kSourceSsrc); + + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrDlrrBlock(remote_ssrcs); + p.AddXrVoipBlock(kSourceSsrc, kLossRate); + p.AddXrReceiverReferenceTimeBlock(0x10203, 0x40506); + + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(static_cast<uint32_t>(kRtcpXrReceiverReferenceTime + + kRtcpXrVoipMetric), + rtcp_packet_info_.rtcpPacketTypeFlags); + // The parser should note the DLRR report block item, but not flag the packet + // since the RTT is not estimated. + EXPECT_TRUE(rtcp_packet_info_.xr_dlrr_item); +} + +TEST_F(RtcpReceiverTest, InjectXrPacketWithUnknownReportBlock) { + const uint8_t kLossRate = 123; + const uint32_t kSourceSsrc = 0x123456; + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + std::vector<uint32_t> remote_ssrcs; + remote_ssrcs.push_back(kSourceSsrc); + + PacketBuilder p; + p.AddXrHeader(0x2345); + p.AddXrVoipBlock(kSourceSsrc, kLossRate); + p.AddXrUnknownBlock(); + p.AddXrReceiverReferenceTimeBlock(0x10203, 0x40506); + + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(static_cast<uint32_t>(kRtcpXrReceiverReferenceTime + + kRtcpXrVoipMetric), + rtcp_packet_info_.rtcpPacketTypeFlags); +} + +TEST(RtcpUtilityTest, MidNtp) { + const uint32_t kNtpSec = 0x12345678; + const uint32_t kNtpFrac = 0x23456789; + const uint32_t kNtpMid = 0x56782345; + EXPECT_EQ(kNtpMid, RTCPUtility::MidNtp(kNtpSec, kNtpFrac)); +} + +TEST_F(RtcpReceiverTest, TestXrRrRttInitiallyFalse) { + uint16_t rtt_ms; + EXPECT_FALSE(rtcp_receiver_->GetAndResetXrRrRtt(&rtt_ms)); +} + +TEST_F(RtcpReceiverTest, LastReceivedXrReferenceTimeInfoInitiallyFalse) { + RtcpReceiveTimeInfo info; + EXPECT_FALSE(rtcp_receiver_->LastReceivedXrReferenceTimeInfo(&info)); +} + +TEST_F(RtcpReceiverTest, GetLastReceivedXrReferenceTimeInfo) { + const uint32_t kSenderSsrc = 0x123456; + const uint32_t kNtpSec = 0x10203; + const uint32_t kNtpFrac = 0x40506; + const uint32_t kNtpMid = RTCPUtility::MidNtp(kNtpSec, kNtpFrac); + + PacketBuilder p; + p.AddXrHeader(kSenderSsrc); + p.AddXrReceiverReferenceTimeBlock(kNtpSec, kNtpFrac); + EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length())); + EXPECT_EQ(kRtcpXrReceiverReferenceTime, + rtcp_packet_info_.rtcpPacketTypeFlags); + + RtcpReceiveTimeInfo info; + EXPECT_TRUE(rtcp_receiver_->LastReceivedXrReferenceTimeInfo(&info)); + EXPECT_EQ(kSenderSsrc, info.sourceSSRC); + EXPECT_EQ(kNtpMid, info.lastRR); + EXPECT_EQ(0U, info.delaySinceLastRR); + + system_clock_.AdvanceTimeMilliseconds(1000); +
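// delaySinceLastRR is expressed in units of 1/65536 of a second (RFC 3611), + // so advancing the simulated clock by 1000 ms should read back as 65536. +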
EXPECT_TRUE(rtcp_receiver_->LastReceivedXrReferenceTimeInfo(&info)); + EXPECT_EQ(65536U, info.delaySinceLastRR); +} + TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { const uint32_t kSenderSsrc = 0x10203; const uint32_t kSourceSsrc = 0x40506; @@ -305,7 +550,7 @@ TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { // Add a RR and advance the clock just enough to not trigger a timeout. PacketBuilder p1; - p1.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0); + p1.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0, 0); EXPECT_EQ(0, InjectRtcpPacket(p1.packet(), p1.length())); system_clock_.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs - 1); EXPECT_FALSE(rtcp_receiver_->RtcpRrTimeout(kRtcpIntervalMs)); @@ -314,7 +559,7 @@ TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { // Add a RR with the same extended max as the previous RR to trigger a // sequence number timeout, but not a RR timeout. PacketBuilder p2; - p2.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0); + p2.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0, 0); EXPECT_EQ(0, InjectRtcpPacket(p2.packet(), p2.length())); system_clock_.AdvanceTimeMilliseconds(2); EXPECT_FALSE(rtcp_receiver_->RtcpRrTimeout(kRtcpIntervalMs)); @@ -332,7 +577,7 @@ TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { // Add a new RR with increase sequence number to reset timers. PacketBuilder p3; sequence_number++; - p2.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0); + p2.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0, 0); EXPECT_EQ(0, InjectRtcpPacket(p2.packet(), p2.length())); EXPECT_FALSE(rtcp_receiver_->RtcpRrTimeout(kRtcpIntervalMs)); EXPECT_FALSE(rtcp_receiver_->RtcpRrSequenceNumberTimeout(kRtcpIntervalMs)); @@ -340,7 +585,7 @@ TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { // Verify we can get a timeout again once we've received new RR. 
system_clock_.AdvanceTimeMilliseconds(2 * kRtcpIntervalMs); PacketBuilder p4; - p4.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0); + p4.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, 0, 0, 0); EXPECT_EQ(0, InjectRtcpPacket(p4.packet(), p4.length())); system_clock_.AdvanceTimeMilliseconds(kRtcpIntervalMs + 1); EXPECT_FALSE(rtcp_receiver_->RtcpRrTimeout(kRtcpIntervalMs)); @@ -366,9 +611,9 @@ TEST_F(RtcpReceiverTest, TwoReportBlocks) { PacketBuilder packet; std::list<PacketBuilder::ReportBlock> report_blocks; report_blocks.push_back(PacketBuilder::ReportBlock( - kSourceSsrcs[0], sequence_numbers[0], 10, 5)); + kSourceSsrcs[0], sequence_numbers[0], 10, 5, 0)); report_blocks.push_back(PacketBuilder::ReportBlock( - kSourceSsrcs[1], sequence_numbers[1], 0, 0)); + kSourceSsrcs[1], sequence_numbers[1], 0, 0, 0)); packet.AddRrPacketMultipleReportBlocks(kSenderSsrc, report_blocks); EXPECT_EQ(0, InjectRtcpPacket(packet.packet(), packet.length())); ASSERT_EQ(2u, rtcp_packet_info_.report_blocks.size()); @@ -378,9 +623,9 @@ TEST_F(RtcpReceiverTest, TwoReportBlocks) { PacketBuilder packet2; report_blocks.clear(); report_blocks.push_back(PacketBuilder::ReportBlock( - kSourceSsrcs[0], sequence_numbers[0], 0, 0)); + kSourceSsrcs[0], sequence_numbers[0], 0, 0, 0)); report_blocks.push_back(PacketBuilder::ReportBlock( - kSourceSsrcs[1], sequence_numbers[1], 20, 10)); + kSourceSsrcs[1], sequence_numbers[1], 20, 10, 0)); packet2.AddRrPacketMultipleReportBlocks(kSenderSsrc, report_blocks); EXPECT_EQ(0, InjectRtcpPacket(packet2.packet(), packet2.length())); ASSERT_EQ(2u, rtcp_packet_info_.report_blocks.size()); @@ -498,6 +743,61 @@ TEST_F(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) { EXPECT_EQ(kMediaRecipientSsrc + 2, candidate_set.Ssrc(0)); } +TEST_F(RtcpReceiverTest, Callbacks) { + class RtcpCallbackImpl : public RtcpStatisticsCallback { + public: + RtcpCallbackImpl() : RtcpStatisticsCallback(), ssrc_(0) {} + virtual ~RtcpCallbackImpl() {} + + virtual void StatisticsUpdated(const RtcpStatistics& statistics, + uint32_t ssrc) { + stats_ = statistics; + ssrc_ = ssrc; + } + + bool Matches(uint32_t ssrc, uint32_t extended_max, uint8_t fraction_loss, + uint32_t cumulative_loss, uint32_t jitter) { + return ssrc_ == ssrc && + stats_.fraction_lost == fraction_loss && + stats_.cumulative_lost == cumulative_loss && + stats_.extended_max_sequence_number == extended_max && + stats_.jitter == jitter; + } + + RtcpStatistics stats_; + uint32_t ssrc_; + } callback; + + rtcp_receiver_->RegisterRtcpStatisticsCallback(&callback); + + const uint32_t kSenderSsrc = 0x10203; + const uint32_t kSourceSsrc = 0x123456; + const uint8_t fraction_loss = 3; + const uint32_t cumulative_loss = 7; + const uint32_t jitter = 9; + uint32_t sequence_number = 1234; + + std::set<uint32_t> ssrcs; + ssrcs.insert(kSourceSsrc); + rtcp_receiver_->SetSsrcs(kSourceSsrc, ssrcs); + + // First packet, all numbers should just propagate. + PacketBuilder p1; + p1.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number, + fraction_loss, cumulative_loss, jitter); + EXPECT_EQ(0, InjectRtcpPacket(p1.packet(), p1.length())); + EXPECT_TRUE(callback.Matches(kSourceSsrc, sequence_number, fraction_loss, + cumulative_loss, jitter)); + + rtcp_receiver_->RegisterRtcpStatisticsCallback(NULL); + + // Add arbitrary numbers, callback should not be called (retain old values). + PacketBuilder p2; + p2.AddRrPacket(kSenderSsrc, kSourceSsrc, sequence_number + 1, 42, 137, 4711); + EXPECT_EQ(0, InjectRtcpPacket(p2.packet(), p2.length())); + EXPECT_TRUE(callback.Matches(kSourceSsrc, sequence_number,
fraction_loss, + cumulative_loss, jitter)); +} } // Anonymous namespace diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc index e52c0d5569f5..aa50d65b2472 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc @@ -77,6 +77,8 @@ RTCPSender::FeedbackState::FeedbackState(ModuleRtpRtcpImpl* module) last_rr_ntp_frac = last_ntp_frac; remote_sr = last_remote_sr; + has_last_xr_rr = module->LastReceivedXrReferenceTimeInfo(&last_xr_rr); + uint32_t send_bitrate = 0, tmp; module->BitrateSent(&send_bitrate, &tmp, &tmp, &tmp); this->send_bitrate = send_bitrate; @@ -90,7 +92,8 @@ RTCPSender::FeedbackState::FeedbackState() send_bitrate(0), last_rr_ntp_secs(0), last_rr_ntp_frac(0), - remote_sr(0) {} + remote_sr(0), + has_last_xr_rr(false) {} RTCPSender::RTCPSender(const int32_t id, const bool audio, @@ -130,6 +133,8 @@ RTCPSender::RTCPSender(const int32_t id, _lastSRPacketCount(), _lastSROctetCount(), + last_xr_rr_(), + _CSRCs(0), _CSRC(), _includeCSRCs(true), @@ -150,6 +155,8 @@ RTCPSender::RTCPSender(const int32_t id, _appName(), _appData(NULL), _appLength(0), + + xrSendReceiverReferenceTimeEnabled_(false), _xrSendVoIPMetric(false), _xrVoIPMetric(), _nackCount(0), @@ -226,6 +233,8 @@ RTCPSender::Init() } _appLength = 0; + xrSendReceiverReferenceTimeEnabled_ = false; + _xrSendVoIPMetric = false; memset(&_xrVoIPMetric, 0, sizeof(_xrVoIPMetric)); @@ -234,6 +243,7 @@ RTCPSender::Init() memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime)); memset(_lastSRPacketCount, 0, sizeof(_lastSRPacketCount)); memset(_lastSROctetCount, 0, sizeof(_lastSROctetCount)); + last_xr_rr_.clear(); _nackCount = 0; _pliCount = 0; @@ -603,6 +613,21 @@ RTCPSender::GetSendReportMetadata(const uint32_t sendReport, return false; } +bool RTCPSender::SendTimeOfXrRrReport(uint32_t mid_ntp, + int64_t* time_ms) const { + CriticalSectionScoped lock(_criticalSectionRTCPSender); + + if (last_xr_rr_.empty()) { + return false; + } + std::map<uint32_t, int64_t>::const_iterator it = last_xr_rr_.find(mid_ntp); + if (it == last_xr_rr_.end()) { + return false; + } + *time_ms = it->second; + return true; +} + int32_t RTCPSender::AddExternalReportBlock( uint32_t SSRC, const RTCPReportBlock* reportBlock) { @@ -1518,6 +1543,107 @@ RTCPSender::BuildBYE(uint8_t* rtcpbuffer, int& pos) return 0; } +int32_t RTCPSender::BuildReceiverReferenceTime(uint8_t* buffer, + int& pos, + uint32_t ntp_sec, + uint32_t ntp_frac) { + const int kRrTimeBlockLength = 20; + if (pos + kRrTimeBlockLength >= IP_PACKET_SIZE) { + return -2; + } + + if (last_xr_rr_.size() >= RTCP_NUMBER_OF_SR) { + last_xr_rr_.erase(last_xr_rr_.begin()); + } + last_xr_rr_.insert(std::pair<uint32_t, int64_t>( + RTCPUtility::MidNtp(ntp_sec, ntp_frac), + Clock::NtpToMs(ntp_sec, ntp_frac))); + + // Add XR header. + buffer[pos++] = 0x80; + buffer[pos++] = 207; + buffer[pos++] = 0; // XR packet length. + buffer[pos++] = 4; // XR packet length. + + // Add our own SSRC.
+ ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, _SSRC); + pos += 4; + + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | BT=4 | reserved | block length = 2 | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | NTP timestamp, most significant word | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | NTP timestamp, least significant word | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + + // Add Receiver Reference Time Report block. + buffer[pos++] = 4; // BT. + buffer[pos++] = 0; // Reserved. + buffer[pos++] = 0; // Block length. + buffer[pos++] = 2; // Block length. + + // NTP timestamp. + ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, ntp_sec); + pos += 4; + ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, ntp_frac); + pos += 4; + + return 0; +} + +int32_t RTCPSender::BuildDlrr(uint8_t* buffer, + int& pos, + const RtcpReceiveTimeInfo& info) { + const int kDlrrBlockLength = 24; + if (pos + kDlrrBlockLength >= IP_PACKET_SIZE) { + return -2; + } + + // Add XR header. + buffer[pos++] = 0x80; + buffer[pos++] = 207; + buffer[pos++] = 0; // XR packet length. + buffer[pos++] = 5; // XR packet length. + + // Add our own SSRC. + ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, _SSRC); + pos += 4; + + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | BT=5 | reserved | block length | + // +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ + // | SSRC_1 (SSRC of first receiver) | sub- + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block + // | last RR (LRR) | 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | delay since last RR (DLRR) | + // +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ + // | SSRC_2 (SSRC of second receiver) | sub- + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block + // : ... : 2 + + // Add DLRR sub block. + buffer[pos++] = 5; // BT. + buffer[pos++] = 0; // Reserved. + buffer[pos++] = 0; // Block length. + buffer[pos++] = 3; // Block length. + + // Sub-block 1: receiver SSRC, last RR (LRR) and delay since last RR (DLRR).
+ ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, info.sourceSSRC); + pos += 4; + ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, info.lastRR); + pos += 4; + ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, info.delaySinceLastRR); + pos += 4; + + return 0; +} + int32_t RTCPSender::BuildVoIPMetric(uint8_t* rtcpbuffer, int& pos) { @@ -1668,7 +1794,17 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state, rtcpPacketTypeFlags |= kRtcpTmmbn; _sendTMMBN = false; } - + if (rtcpPacketTypeFlags & kRtcpReport) + { + if (xrSendReceiverReferenceTimeEnabled_ && !_sending) + { + rtcpPacketTypeFlags |= kRtcpXrReceiverReferenceTime; + } + if (feedback_state.has_last_xr_rr) + { + rtcpPacketTypeFlags |= kRtcpXrDlrrReportBlock; + } + } if(_method == kRtcpCompound) { if(_sending) @@ -1743,7 +1879,6 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state, if (_IJ && !statisticians.empty()) { rtcpPacketTypeFlags |= kRtcpTransmissionTimeOffset; } - _lastRTCPTime[0] = Clock::NtpToMs(NTPsec, NTPfrac); } } @@ -1909,6 +2044,27 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state, return position; } } + if (rtcpPacketTypeFlags & kRtcpXrReceiverReferenceTime) + { + buildVal = BuildReceiverReferenceTime(rtcp_buffer, + position, + NTPsec, + NTPfrac); + if (buildVal == -1) { + return -1; + } else if (buildVal == -2) { + return position; + } + } + if (rtcpPacketTypeFlags & kRtcpXrDlrrReportBlock) + { + buildVal = BuildDlrr(rtcp_buffer, position, feedback_state.last_xr_rr); + if (buildVal == -1) { + return -1; + } else if (buildVal == -2) { + return position; + } + } return position; } @@ -1924,7 +2080,7 @@ bool RTCPSender::PrepareReport(const FeedbackState& feedback_state, RTCPReportBlock* report_block, uint32_t* ntp_secs, uint32_t* ntp_frac) { // Do we have receive statistics to send? - StreamStatistician::Statistics stats; + RtcpStatistics stats; if (!statistician->GetStatistics(&stats, true)) return false; report_block->fractionLost = stats.fraction_lost; @@ -2036,6 +2192,16 @@ RTCPSender::SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric) return 0; } +void RTCPSender::SendRtcpXrReceiverReferenceTime(bool enable) { + CriticalSectionScoped lock(_criticalSectionRTCPSender); + xrSendReceiverReferenceTimeEnabled_ = enable; +} + +bool RTCPSender::RtcpXrReceiverReferenceTime() const { + CriticalSectionScoped lock(_criticalSectionRTCPSender); + return xrSendReceiverReferenceTimeEnabled_; +} + // called under critsect _criticalSectionRTCPSender int32_t RTCPSender::WriteAllReportBlocksToBuffer( uint8_t* rtcpbuffer, diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.h index 48c218b89ff2..21222f374d80 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.h @@ -63,6 +63,9 @@ public: uint32_t last_rr_ntp_frac; uint32_t remote_sr; + bool has_last_xr_rr; + RtcpReceiveTimeInfo last_xr_rr; + // Used when generating TMMBR. 
ModuleRtpRtcpImpl* module; }; @@ -110,6 +113,8 @@ public: uint32_t *packetCount, uint64_t *octetCount); + bool SendTimeOfXrRrReport(uint32_t mid_ntp, int64_t* time_ms) const; + bool TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP = false) const; uint32_t LastSendReport(uint32_t& lastRTCPTime); @@ -167,6 +172,10 @@ public: int32_t SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric); + void SendRtcpXrReceiverReferenceTime(bool enable); + + bool RtcpXrReceiverReferenceTime() const; + int32_t SetCSRCs(const uint32_t arrOfCSRC[kRtpCsrcSize], const uint8_t arrLength); @@ -253,6 +262,14 @@ private: const uint16_t* nackList, std::string* nackString); + int32_t BuildReceiverReferenceTime(uint8_t* buffer, + int& pos, + uint32_t ntp_sec, + uint32_t ntp_frac); + int32_t BuildDlrr(uint8_t* buffer, + int& pos, + const RtcpReceiveTimeInfo& info); + private: int32_t _id; const bool _audio; @@ -294,6 +311,10 @@ private: uint32_t _lastSRPacketCount[RTCP_NUMBER_OF_SR]; uint64_t _lastSROctetCount[RTCP_NUMBER_OF_SR]; + // Sent XR receiver reference time report. + // <mid ntp, send time in ms>. + std::map<uint32_t, int64_t> last_xr_rr_; + // send CSRCs uint8_t _CSRCs; uint32_t _CSRC[kRtpCsrcSize]; @@ -319,6 +340,9 @@ private: uint8_t* _appData; uint16_t _appLength; + // True if sending of XR Receiver reference time report is enabled. + bool xrSendReceiverReferenceTimeEnabled_; + // XR VoIP metric bool _xrSendVoIPMetric; RTCPVoIPMetric _xrVoIPMetric; diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc index d8d4390374a7..a8b5275fa200 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc @@ -272,32 +272,35 @@ class TestTransport : public Transport, class RtcpSenderTest : public ::testing::Test { protected: + static const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000; + RtcpSenderTest() : over_use_detector_options_(), - system_clock_(Clock::GetRealTimeClock()), + clock_(1335900000), rtp_payload_registry_(new RTPPayloadRegistry( 0, RTPPayloadStrategy::CreateStrategy(false))), remote_bitrate_observer_(), remote_bitrate_estimator_( RemoteBitrateEstimatorFactory().Create( &remote_bitrate_observer_, - system_clock_)), - receive_statistics_(ReceiveStatistics::Create(system_clock_)) { + &clock_, + kRemoteBitrateEstimatorMinBitrateBps)), + receive_statistics_(ReceiveStatistics::Create(&clock_)) { test_transport_ = new TestTransport(); RtpRtcp::Configuration configuration; configuration.id = 0; configuration.audio = false; - configuration.clock = system_clock_; + configuration.clock = &clock_; configuration.outgoing_transport = test_transport_; configuration.remote_bitrate_estimator = remote_bitrate_estimator_.get(); rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(configuration); rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver( - 0, system_clock_, test_transport_, NULL, rtp_payload_registry_.get())); + 0, &clock_, test_transport_, NULL, rtp_payload_registry_.get())); rtcp_sender_ = - new RTCPSender(0, false, system_clock_, receive_statistics_.get()); - rtcp_receiver_ = new RTCPReceiver(0, system_clock_, rtp_rtcp_impl_); + new RTCPSender(0, false, &clock_, receive_statistics_.get()); + rtcp_receiver_ = new RTCPReceiver(0, &clock_, rtp_rtcp_impl_); test_transport_->SetRTCPReceiver(rtcp_receiver_); // Initialize EXPECT_EQ(0, rtcp_sender_->Init()); @@ -317,7 +320,7 @@ class RtcpSenderTest : public ::testing::Test { } OverUseDetectorOptions
over_use_detector_options_; - Clock* system_clock_; + SimulatedClock clock_; scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_; scoped_ptr<RtpReceiver> rtp_receiver_; ModuleRtpRtcpImpl* rtp_rtcp_impl_; @@ -397,6 +400,70 @@ TEST_F(RtcpSenderTest, TestCompound_NoRtpReceived) { kRtcpTransmissionTimeOffset); } +TEST_F(RtcpSenderTest, TestXrReceiverReferenceTime) { + EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound)); + RTCPSender::FeedbackState feedback_state(rtp_rtcp_impl_); + EXPECT_EQ(0, rtcp_sender_->SetSendingStatus(feedback_state, false)); + rtcp_sender_->SendRtcpXrReceiverReferenceTime(true); + EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state, kRtcpReport)); + + EXPECT_TRUE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags & + kRtcpXrReceiverReferenceTime); +} + +TEST_F(RtcpSenderTest, TestNoXrReceiverReferenceTimeIfSending) { + EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound)); + RTCPSender::FeedbackState feedback_state(rtp_rtcp_impl_); + EXPECT_EQ(0, rtcp_sender_->SetSendingStatus(feedback_state, true)); + rtcp_sender_->SendRtcpXrReceiverReferenceTime(true); + EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state, kRtcpReport)); + + EXPECT_FALSE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags & + kRtcpXrReceiverReferenceTime); +} + +TEST_F(RtcpSenderTest, TestNoXrReceiverReferenceTimeIfNotEnabled) { + EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound)); + RTCPSender::FeedbackState feedback_state(rtp_rtcp_impl_); + EXPECT_EQ(0, rtcp_sender_->SetSendingStatus(feedback_state, false)); + rtcp_sender_->SendRtcpXrReceiverReferenceTime(false); + EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state, kRtcpReport)); + + EXPECT_FALSE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags & + kRtcpXrReceiverReferenceTime); +} + +TEST_F(RtcpSenderTest, TestSendTimeOfXrRrReport) { + EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound)); + RTCPSender::FeedbackState feedback_state(rtp_rtcp_impl_); + EXPECT_EQ(0, rtcp_sender_->SetSendingStatus(feedback_state, false)); + rtcp_sender_->SendRtcpXrReceiverReferenceTime(true); + uint32_t ntp_sec; + uint32_t ntp_frac; + clock_.CurrentNtp(ntp_sec, ntp_frac); + uint32_t initial_mid_ntp = RTCPUtility::MidNtp(ntp_sec, ntp_frac); + + // No packet sent. + int64_t time_ms; + EXPECT_FALSE(rtcp_sender_->SendTimeOfXrRrReport(initial_mid_ntp, &time_ms)); + + // Send XR RR packets. + for (int i = 0; i <= RTCP_NUMBER_OF_SR; ++i) { + EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state, kRtcpReport)); + EXPECT_TRUE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags & + kRtcpXrReceiverReferenceTime); + + clock_.CurrentNtp(ntp_sec, ntp_frac); + uint32_t mid_ntp = RTCPUtility::MidNtp(ntp_sec, ntp_frac); + EXPECT_TRUE(rtcp_sender_->SendTimeOfXrRrReport(mid_ntp, &time_ms)); + EXPECT_EQ(clock_.CurrentNtpInMilliseconds(), time_ms); + clock_.AdvanceTimeMilliseconds(1000); + } + + // The first report should no longer be stored. + EXPECT_FALSE(rtcp_sender_->SendTimeOfXrRrReport(initial_mid_ntp, &time_ms)); +} + // This test is written to verify actual behaviour. It does not seem // to make much sense to send an empty TMMBN, since there is no place // to put an actual limit here.
It's just information that no limit diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc index 8a9e1c5e34c2..705a38b01619 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc @@ -15,8 +15,14 @@ #include <string.h> // memcpy namespace webrtc { -// RTCPParserV2 : currently read only +namespace RTCPUtility { +uint32_t MidNtp(uint32_t ntp_sec, uint32_t ntp_frac) { + return (ntp_sec << 16) + (ntp_frac >> 16); +} +} // namespace RTCPUtility + +// RTCPParserV2 : currently read only RTCPUtility::RTCPParserV2::RTCPParserV2(const uint8_t* rtcpData, size_t rtcpDataLength, bool rtcpReducedSizeEnable) @@ -110,6 +116,12 @@ RTCPUtility::RTCPParserV2::Iterate() case State_PSFB_REMBItem: IteratePsfbREMBItem(); break; + case State_XRItem: + IterateXrItem(); + break; + case State_XR_DLLRItem: + IterateXrDlrrItem(); + break; case State_AppItem: IterateAppItem(); break; @@ -198,7 +210,6 @@ RTCPUtility::RTCPParserV2::IterateTopLevel() // Nothing supported found, continue to next block! break; } - return; } case PT_APP: { @@ -213,7 +224,7 @@ } case PT_XR: { - const bool ok = ParseXR(); + const bool ok = ParseXr(); if (!ok) { // Nothing supported found, continue to next block! @@ -229,6 +240,26 @@ } } } +void +RTCPUtility::RTCPParserV2::IterateXrItem() +{ + const bool success = ParseXrItem(); + if (!success) + { + Iterate(); + } +} + +void +RTCPUtility::RTCPParserV2::IterateXrDlrrItem() +{ + const bool success = ParseXrDlrrItem(); + if (!success) + { + Iterate(); + } +} + void RTCPUtility::RTCPParserV2::IterateReportBlockItem() { @@ -820,7 +851,7 @@ RTCPUtility::RTCPParserV2::ParseBYEItem() return true; } /* - 0 1 2 3 + 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |V=2|P|reserved | PT=XR=207 | length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ : report blocks : +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ */ -bool RTCPUtility::RTCPParserV2::ParseXR() +bool RTCPUtility::RTCPParserV2::ParseXr() { const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; - if (length < 8) { EndCurrentBlock(); @@ -847,51 +877,154 @@ bool RTCPUtility::RTCPParserV2::ParseXR() _packet.XR.OriginatorSSRC += *_ptrRTCPData++ << 8; _packet.XR.OriginatorSSRC += *_ptrRTCPData++; - return ParseXRItem(); + _packetType = kRtcpXrHeaderCode; + _state = State_XRItem; + return true; } + +/* Extended report block format (RFC 3611). + BT: block type. + block length: length of report block in 32-bit words minus one (including + the header).
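+ (For example, the 12-byte Receiver Reference Time block is three 32-bit + words long, so its block length field is 2.)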
0 1 2 3 - 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | BT | type-specific | block length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ : type-specific block contents : +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ */ +bool RTCPUtility::RTCPParserV2::ParseXrItem() { + const int kBlockHeaderLengthInBytes = 4; + const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; + if (length < kBlockHeaderLengthInBytes) { + _state = State_TopLevel; + EndCurrentBlock(); + return false; + } -bool -RTCPUtility::RTCPParserV2::ParseXRItem() -{ - const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; + uint8_t block_type = *_ptrRTCPData++; + _ptrRTCPData++; // Ignore reserved. - if (length < 4) // - { - EndCurrentBlock(); - return false; - } + uint16_t block_length_in_4bytes = *_ptrRTCPData++ << 8; + block_length_in_4bytes += *_ptrRTCPData++; - uint8_t blockType = *_ptrRTCPData++; - uint8_t typeSpecific = *_ptrRTCPData++; - - uint16_t blockLength = *_ptrRTCPData++ << 8; - blockLength = *_ptrRTCPData++; - - if(blockType == 7 && typeSpecific == 0) - { - if(blockLength != 8) - { - EndCurrentBlock(); - return false; - } - return ParseXRVOIPMetricItem(); - }else - { - EndCurrentBlock(); - return false; - } + switch (block_type) { + case kBtReceiverReferenceTime: + return ParseXrReceiverReferenceTimeItem(block_length_in_4bytes); + case kBtDlrr: + return ParseXrDlrr(block_length_in_4bytes); + case kBtVoipMetric: + return ParseXrVoipMetricItem(block_length_in_4bytes); + default: + return ParseXrUnsupportedBlockType(block_length_in_4bytes); + } } -/* - 0 1 2 3 + +/* Receiver Reference Time Report Block. + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | BT=4 | reserved | block length = 2 | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | NTP timestamp, most significant word | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | NTP timestamp, least significant word | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +*/ +bool RTCPUtility::RTCPParserV2::ParseXrReceiverReferenceTimeItem( + int block_length_4bytes) { + const int kBlockLengthIn4Bytes = 2; + const int kBlockLengthInBytes = kBlockLengthIn4Bytes * 4; + const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; + if (block_length_4bytes != kBlockLengthIn4Bytes || + length < kBlockLengthInBytes) { + _state = State_TopLevel; + EndCurrentBlock(); + return false; + } + + _packet.XRReceiverReferenceTimeItem.NTPMostSignificant = *_ptrRTCPData++<<24; + _packet.XRReceiverReferenceTimeItem.NTPMostSignificant+= *_ptrRTCPData++<<16; + _packet.XRReceiverReferenceTimeItem.NTPMostSignificant+= *_ptrRTCPData++<<8; + _packet.XRReceiverReferenceTimeItem.NTPMostSignificant+= *_ptrRTCPData++; + + _packet.XRReceiverReferenceTimeItem.NTPLeastSignificant = *_ptrRTCPData++<<24; + _packet.XRReceiverReferenceTimeItem.NTPLeastSignificant+= *_ptrRTCPData++<<16; + _packet.XRReceiverReferenceTimeItem.NTPLeastSignificant+= *_ptrRTCPData++<<8; + _packet.XRReceiverReferenceTimeItem.NTPLeastSignificant+= *_ptrRTCPData++; + + _packetType = kRtcpXrReceiverReferenceTimeCode; + _state = State_XRItem; + return true; +} + +/* DLRR Report Block. 
+ 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | BT=5 | reserved | block length | + +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ + | SSRC_1 (SSRC of first receiver) | sub- + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block + | last RR (LRR) | 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | delay since last RR (DLRR) | + +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ + | SSRC_2 (SSRC of second receiver) | sub- + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block + : ... : 2 + +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ +*/ +bool RTCPUtility::RTCPParserV2::ParseXrDlrr(int block_length_4bytes) { + const int kSubBlockLengthIn4Bytes = 3; + if (block_length_4bytes < 0 || + (block_length_4bytes % kSubBlockLengthIn4Bytes) != 0) { + _state = State_TopLevel; + EndCurrentBlock(); + return false; + } + _packetType = kRtcpXrDlrrReportBlockCode; + _state = State_XR_DLLRItem; + _numberOfBlocks = block_length_4bytes / kSubBlockLengthIn4Bytes; + return true; +} + +bool RTCPUtility::RTCPParserV2::ParseXrDlrrItem() { + if (_numberOfBlocks == 0) { + _state = State_XRItem; + return false; + } + const int kSubBlockLengthInBytes = 12; + const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; + if (length < kSubBlockLengthInBytes) { + _state = State_TopLevel; + EndCurrentBlock(); + return false; + } + + _packet.XRDLRRReportBlockItem.SSRC = *_ptrRTCPData++ << 24; + _packet.XRDLRRReportBlockItem.SSRC += *_ptrRTCPData++ << 16; + _packet.XRDLRRReportBlockItem.SSRC += *_ptrRTCPData++ << 8; + _packet.XRDLRRReportBlockItem.SSRC += *_ptrRTCPData++; + + _packet.XRDLRRReportBlockItem.LastRR = *_ptrRTCPData++ << 24; + _packet.XRDLRRReportBlockItem.LastRR += *_ptrRTCPData++ << 16; + _packet.XRDLRRReportBlockItem.LastRR += *_ptrRTCPData++ << 8; + _packet.XRDLRRReportBlockItem.LastRR += *_ptrRTCPData++; + + _packet.XRDLRRReportBlockItem.DelayLastRR = *_ptrRTCPData++ << 24; + _packet.XRDLRRReportBlockItem.DelayLastRR += *_ptrRTCPData++ << 16; + _packet.XRDLRRReportBlockItem.DelayLastRR += *_ptrRTCPData++ << 8; + _packet.XRDLRRReportBlockItem.DelayLastRR += *_ptrRTCPData++; + + _packetType = kRtcpXrDlrrReportBlockItemCode; + --_numberOfBlocks; + _state = State_XR_DLLRItem; + return true; +} +/* VoIP Metrics Report Block. 
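   Defined in RFC 3611, section 4.7. Unlike the variable-length DLRR block,
   this block has a fixed size: the parser below rejects any VoIP metrics
   block whose length field is not exactly 8 32-bit words.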
+ 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | BT=7 | reserved | block length = 8 | @@ -913,61 +1046,78 @@ RTCPUtility::RTCPParserV2::ParseXRItem() | JB maximum | JB abs max | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ */ -bool -RTCPUtility::RTCPParserV2::ParseXRVOIPMetricItem() -{ - const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; - if (length < 28) - { - EndCurrentBlock(); - return false; - } - _packetType = kRtcpXrVoipMetricCode; +bool RTCPUtility::RTCPParserV2::ParseXrVoipMetricItem(int block_length_4bytes) { + const int kBlockLengthIn4Bytes = 8; + const int kBlockLengthInBytes = kBlockLengthIn4Bytes * 4; + const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; + if (block_length_4bytes != kBlockLengthIn4Bytes || + length < kBlockLengthInBytes) { + _state = State_TopLevel; + EndCurrentBlock(); + return false; + } - _packet.XRVOIPMetricItem.SSRC = *_ptrRTCPData++ << 24; - _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 16; - _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.SSRC = *_ptrRTCPData++ << 24; + _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 16; + _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.lossRate = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.discardRate = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.burstDensity = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.gapDensity = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.lossRate = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.discardRate = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.burstDensity = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.gapDensity = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.burstDuration = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.burstDuration += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.burstDuration = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.burstDuration += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.gapDuration = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.gapDuration += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.gapDuration = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.gapDuration += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.roundTripDelay = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.roundTripDelay += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.roundTripDelay = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.roundTripDelay += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.endSystemDelay = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.endSystemDelay += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.endSystemDelay = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.endSystemDelay += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.signalLevel = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.noiseLevel = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.RERL = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.Gmin = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.Rfactor = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.extRfactor = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.MOSLQ = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.MOSCQ = *_ptrRTCPData++; - _packet.XRVOIPMetricItem.RXconfig = *_ptrRTCPData++; - _ptrRTCPData++; // skip reserved + _packet.XRVOIPMetricItem.signalLevel = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.noiseLevel = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.RERL = 
*_ptrRTCPData++; + _packet.XRVOIPMetricItem.Gmin = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.Rfactor = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.extRfactor = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.MOSLQ = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.MOSCQ = *_ptrRTCPData++; + _packet.XRVOIPMetricItem.RXconfig = *_ptrRTCPData++; + _ptrRTCPData++; // skip reserved - _packet.XRVOIPMetricItem.JBnominal = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.JBnominal += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.JBnominal = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.JBnominal += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.JBmax = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.JBmax += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.JBmax = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.JBmax += *_ptrRTCPData++; - _packet.XRVOIPMetricItem.JBabsMax = *_ptrRTCPData++ << 8; - _packet.XRVOIPMetricItem.JBabsMax += *_ptrRTCPData++; + _packet.XRVOIPMetricItem.JBabsMax = *_ptrRTCPData++ << 8; + _packet.XRVOIPMetricItem.JBabsMax += *_ptrRTCPData++; - return true; + _packetType = kRtcpXrVoipMetricCode; + _state = State_XRItem; + return true; +} + +bool RTCPUtility::RTCPParserV2::ParseXrUnsupportedBlockType( + int block_length_4bytes) { + const int32_t kBlockLengthInBytes = block_length_4bytes * 4; + const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData; + if (length < kBlockLengthInBytes) { + _state = State_TopLevel; + EndCurrentBlock(); + return false; + } + // Skip block. + _ptrRTCPData += kBlockLengthInBytes; + _state = State_XRItem; + return false; } bool diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.h index fa771ab93e32..f0867a75ef5f 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_utility.h @@ -19,6 +19,8 @@ namespace webrtc { namespace RTCPUtility { + uint32_t MidNtp(uint32_t ntp_sec, uint32_t ntp_frac); + // CNAME struct RTCPCnameInformation { @@ -74,6 +76,19 @@ namespace RTCPUtility { // RFC 3611 uint32_t OriginatorSSRC; }; + struct RTCPPacketXRReceiverReferenceTimeItem + { + // RFC 3611 4.4 + uint32_t NTPMostSignificant; + uint32_t NTPLeastSignificant; + }; + struct RTCPPacketXRDLRRReportBlockItem + { + // RFC 3611 4.5 + uint32_t SSRC; + uint32_t LastRR; + uint32_t DelayLastRR; + }; struct RTCPPacketXRVOIPMetricItem { // RFC 3611 4.7 @@ -228,6 +243,8 @@ namespace RTCPUtility { RTCPPacketPSFBFIRItem FIRItem; RTCPPacketXR XR; + RTCPPacketXRReceiverReferenceTimeItem XRReceiverReferenceTimeItem; + RTCPPacketXRDLRRReportBlockItem XRDLRRReportBlockItem; RTCPPacketXRVOIPMetricItem XRVOIPMetricItem; RTCPPacketAPP APP; @@ -274,6 +291,10 @@ namespace RTCPUtility { kRtcpRtpfbSrReqCode, // RFC 3611 + kRtcpXrHeaderCode, + kRtcpXrReceiverReferenceTimeCode, + kRtcpXrDlrrReportBlockCode, + kRtcpXrDlrrReportBlockItemCode, kRtcpXrVoipMetricCode, kRtcpAppCode, @@ -314,6 +335,13 @@ namespace RTCPUtility { PT_XR = 207 }; + // Extended report blocks, RFC 3611. 
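The block type codes added just below are the only XR blocks the reworked parser interprets; every other type is skipped through ParseXrUnsupportedBlockType(). They pair with the MidNtp() helper introduced at the top of this patch, which folds a 64-bit NTP timestamp down to its middle 32 bits, the compact form RFC 3611 uses in RRTR and DLRR blocks. As a minimal sketch (the function and argument names are illustrative, not part of the patch), the round-trip time an RRTR sender derives from a returned DLRR sub-block is:

    // All quantities are in compact NTP units, i.e. 1/65536 of a second;
    // unsigned arithmetic keeps the subtraction correct across wrap-around.
    uint32_t CompactNtpRtt(uint32_t now_sec, uint32_t now_frac,
                           uint32_t last_rr, uint32_t delay_since_last_rr) {
      uint32_t now = webrtc::RTCPUtility::MidNtp(now_sec, now_frac);
      return now - last_rr - delay_since_last_rr;
    }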
+ enum RtcpXrBlockType { + kBtReceiverReferenceTime = 4, + kBtDlrr = 5, + kBtVoipMetric = 7 + }; + bool RTCPParseCommonHeader( const uint8_t* ptrDataBegin, const uint8_t* ptrDataEnd, RTCPCommonHeader& parsedHeader); @@ -353,6 +381,7 @@ namespace RTCPUtility { State_PSFB_AppItem, // Application specific FCI item State_PSFB_REMBItem, // Application specific REMB item State_XRItem, + State_XR_DLLRItem, State_AppItem }; @@ -371,6 +400,8 @@ namespace RTCPUtility { void IteratePsfbAppItem(); void IteratePsfbREMBItem(); void IterateAppItem(); + void IterateXrItem(); + void IterateXrDlrrItem(); void Validate(); void EndCurrentBlock(); @@ -389,9 +420,13 @@ namespace RTCPUtility { bool ParseIJ(); bool ParseIJItem(); - bool ParseXR(); - bool ParseXRItem(); - bool ParseXRVOIPMetricItem(); + bool ParseXr(); + bool ParseXrItem(); + bool ParseXrReceiverReferenceTimeItem(int block_length_4bytes); + bool ParseXrDlrr(int block_length_4bytes); + bool ParseXrDlrrItem(); + bool ParseXrVoipMetricItem(int block_length_4bytes); + bool ParseXrUnsupportedBlockType(int block_length_4bytes); bool ParseFBCommon(const RTCPCommonHeader& header); bool ParseNACKItem(); diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc index bcd1d10c0766..1d8b2be37886 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc @@ -8,19 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file includes unit tests for the VP8 packetizer. */ #include "testing/gtest/include/gtest/gtest.h" - -#include "webrtc/system_wrappers/interface/compile_assert.h" - #include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h" #include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h" +#include "webrtc/system_wrappers/interface/compile_assert.h" #include "webrtc/typedefs.h" +#define CHECK_ARRAY_SIZE(expected_size, array) \ + COMPILE_ASSERT(expected_size == sizeof(array) / sizeof(array[0]), \ + check_array_size); + namespace webrtc { class RtpFormatVp8Test : public ::testing::Test { @@ -63,10 +64,8 @@ TEST_F(RtpFormatVp8Test, TestStrictMode) { const bool kExpectedFragStart[] = {true, false, true, true, false, false, false}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -91,10 +90,8 @@ TEST_F(RtpFormatVp8Test, TestAggregateMode) { const int kExpectedPart[] = {0, 0, 0, 1}; const bool kExpectedFragStart[] = {true, false, false, true}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -119,10 +116,8 @@ TEST_F(RtpFormatVp8Test, 
TestAggregateModeManyPartitions1) { const int kExpectedPart[] = {0, 0, 1, 5}; const bool kExpectedFragStart[] = {true, false, true, true}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -147,10 +142,8 @@ TEST_F(RtpFormatVp8Test, TestAggregateModeManyPartitions2) { const int kExpectedPart[] = {0, 0, 1, 4, 4, 5}; const bool kExpectedFragStart[] = {true, false, true, true, false, true}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -175,10 +168,8 @@ TEST_F(RtpFormatVp8Test, TestAggregateModeTwoLargePartitions) { const int kExpectedPart[] = {0, 0, 1, 1}; const bool kExpectedFragStart[] = {true, false, true, false}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -203,10 +194,8 @@ TEST_F(RtpFormatVp8Test, TestEqualSizeModeFallback) { // Frag start only true for first packet in equal size mode. const bool kExpectedFragStart[] = {true, false, false, false}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->set_sloppy_partitioning(true); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, @@ -232,10 +221,8 @@ TEST_F(RtpFormatVp8Test, TestNonReferenceBit) { // Frag start only true for first packet in equal size mode. const bool kExpectedFragStart[] = {true, false}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->set_sloppy_partitioning(true); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, @@ -265,10 +252,8 @@ TEST_F(RtpFormatVp8Test, TestTl0PicIdxAndTID) { const int kExpectedPart[1] = {0}; // Packet starts with partition 0. 
const bool kExpectedFragStart[1] = {true}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -295,10 +280,8 @@ TEST_F(RtpFormatVp8Test, TestKeyIdx) { const int kExpectedPart[1] = {0}; // Packet starts with partition 0. const bool kExpectedFragStart[1] = {true}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); @@ -326,10 +309,8 @@ TEST_F(RtpFormatVp8Test, TestTIDAndKeyIdx) { const int kExpectedPart[1] = {0}; // Packet starts with partition 0. const bool kExpectedFragStart[1] = {true}; const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedPart) / sizeof(kExpectedPart[0])); - COMPILE_ASSERT(kExpectedNum == - sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0])); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedPart); + CHECK_ARRAY_SIZE(kExpectedNum, kExpectedFragStart); helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart, kExpectedFragStart, kExpectedNum); diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc index 56eba3b5714e..14ca821f7e50 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc @@ -11,7 +11,10 @@ #include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h" #include +#include #include // memset +#include +#include #include "webrtc/modules/rtp_rtcp/source/rtp_utility.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" @@ -19,6 +22,8 @@ namespace webrtc { +enum { kMinPacketRequestBytes = 50 }; + RTPPacketHistory::RTPPacketHistory(Clock* clock) : clock_(clock), critsect_(CriticalSectionWrapper::CreateCriticalSection()), @@ -55,7 +60,7 @@ void RTPPacketHistory::Allocate(uint16_t number_to_store) { stored_seq_nums_.resize(number_to_store); stored_lengths_.resize(number_to_store); stored_times_.resize(number_to_store); - stored_resend_times_.resize(number_to_store); + stored_send_times_.resize(number_to_store); stored_types_.resize(number_to_store); } @@ -74,7 +79,7 @@ void RTPPacketHistory::Free() { stored_seq_nums_.clear(); stored_lengths_.clear(); stored_times_.clear(); - stored_resend_times_.clear(); + stored_send_times_.clear(); stored_types_.clear(); store_ = false; @@ -139,9 +144,9 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet, stored_seq_nums_[prev_index_] = seq_num; stored_lengths_[prev_index_] = packet_length; - stored_times_[prev_index_] = - (capture_time_ms > 0) ? 
capture_time_ms : clock_->TimeInMilliseconds(); - stored_resend_times_[prev_index_] = 0; // packet not resent + stored_times_[prev_index_] = (capture_time_ms > 0) ? capture_time_ms : + clock_->TimeInMilliseconds(); + stored_send_times_[prev_index_] = 0; // Packet not sent. stored_types_[prev_index_] = type; ++prev_index_; @@ -211,12 +216,12 @@ bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const { return true; } -bool RTPPacketHistory::GetRTPPacket(uint16_t sequence_number, - uint32_t min_elapsed_time_ms, - uint8_t* packet, - uint16_t* packet_length, - int64_t* stored_time_ms, - StorageType* type) const { +bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number, + uint32_t min_elapsed_time_ms, + bool retransmit, + uint8_t* packet, + uint16_t* packet_length, + int64_t* stored_time_ms) { CriticalSectionScoped cs(critsect_); if (!store_) { return false; @@ -237,46 +242,56 @@ bool RTPPacketHistory::GetRTPPacket(uint16_t sequence_number, return false; } - if (length > *packet_length) { + if (length > *packet_length) { WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1, - "Input buffer too short for packet %u", sequence_number); + "Input buffer too short for packet %u", sequence_number); return false; - } + } // Verify elapsed time since last retrieve. int64_t now = clock_->TimeInMilliseconds(); if (min_elapsed_time_ms > 0 && - ((now - stored_resend_times_.at(index)) < min_elapsed_time_ms)) { + ((now - stored_send_times_.at(index)) < min_elapsed_time_ms)) { WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "Skip getting packet %u, packet recently resent.", sequence_number); - *packet_length = 0; - return true; + return false; } - // Get packet. - std::vector >::const_iterator it_found_packet = - stored_packets_.begin() + index; - std::copy(it_found_packet->begin(), it_found_packet->begin() + length, packet); - *packet_length = stored_lengths_.at(index); - *stored_time_ms = stored_times_.at(index); - *type = stored_types_.at(index); + if (retransmit && stored_types_.at(index) == kDontRetransmit) { + // No bytes copied since this packet shouldn't be retransmitted or is + // of zero size. + return false; + } + stored_send_times_[index] = clock_->TimeInMilliseconds(); + GetPacket(index, packet, packet_length, stored_time_ms); return true; } -void RTPPacketHistory::UpdateResendTime(uint16_t sequence_number) { - CriticalSectionScoped cs(critsect_); - if (!store_) { - return; - } +void RTPPacketHistory::GetPacket(int index, + uint8_t* packet, + uint16_t* packet_length, + int64_t* stored_time_ms) const { + // Get packet. 
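  // Private helper shared by GetPacketAndSetSendTime() and
  // GetBestFittingPacket(): it copies the stored payload at |index| into
  // |packet| and reports the payload's length and original store time.
  // The caller is expected to already hold |critsect_|.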
+  uint16_t length = stored_lengths_.at(index);
+  std::vector<std::vector<uint8_t> >::const_iterator it_found_packet =
+      stored_packets_.begin() + index;
+  std::copy(it_found_packet->begin(), it_found_packet->begin() + length,
+            packet);
+  *packet_length = length;
+  *stored_time_ms = stored_times_.at(index);
+}

-  int32_t index = 0;
-  bool found = FindSeqNum(sequence_number, &index);
-  if (!found) {
-    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
-                 "Failed to update resend time, seq num: %u.", sequence_number);
-    return;
-  }
-  stored_resend_times_[index] = clock_->TimeInMilliseconds();
+bool RTPPacketHistory::GetBestFittingPacket(uint8_t* packet,
+                                            uint16_t* packet_length,
+                                            int64_t* stored_time_ms) {
+  CriticalSectionScoped cs(critsect_);
+  if (!store_)
+    return false;
+  int index = FindBestFittingPacket(*packet_length);
+  if (index < 0)
+    return false;
+  GetPacket(index, packet, packet_length, stored_time_ms);
+  return true;
 }

 // private, lock should already be taken
@@ -313,4 +328,23 @@ bool RTPPacketHistory::FindSeqNum(uint16_t sequence_number,
   }
   return false;
 }
+
+int RTPPacketHistory::FindBestFittingPacket(uint16_t size) const {
+  if (size < kMinPacketRequestBytes || stored_lengths_.empty())
+    return -1;
+  int min_diff = -1;
+  size_t best_index = 0;
+  for (size_t i = 0; i < stored_lengths_.size(); ++i) {
+    if (stored_lengths_[i] == 0)
+      continue;
+    int diff = abs(stored_lengths_[i] - size);
+    if (min_diff < 0 || diff < min_diff) {
+      min_diff = diff;
+      best_index = i;
+    }
+  }
+  if (min_diff < 0)
+    return -1;
+  return best_index;
+}
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
index bf61c4aa4ac0..785e4992b0a7 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -59,22 +59,26 @@ class RTPPacketHistory {
   //  copied.
   //  stored_time_ms: returns the time when the packet was stored.
   //  type: returns the storage type set in PutRTPPacket.
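  // retransmit: when true, packets stored with kDontRetransmit are refused.
  // Note the tightened contract relative to the removed GetRTPPacket(): the
  // call now returns false with no bytes copied if the packet is unknown,
  // was sent less than min_elapsed_time_ms ago, or is barred from
  // retransmission; on success it records the send time, which is what the
  // removed UpdateResendTime() used to do in a separate call.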
- bool GetRTPPacket(uint16_t sequence_number, - uint32_t min_elapsed_time_ms, - uint8_t* packet, - uint16_t* packet_length, - int64_t* stored_time_ms, - StorageType* type) const; + bool GetPacketAndSetSendTime(uint16_t sequence_number, + uint32_t min_elapsed_time_ms, + bool retransmit, + uint8_t* packet, + uint16_t* packet_length, + int64_t* stored_time_ms); + + bool GetBestFittingPacket(uint8_t* packet, uint16_t* packet_length, + int64_t* stored_time_ms); bool HasRTPPacket(uint16_t sequence_number) const; - void UpdateResendTime(uint16_t sequence_number); - private: + void GetPacket(int index, uint8_t* packet, uint16_t* packet_length, + int64_t* stored_time_ms) const; void Allocate(uint16_t number_to_store); void Free(); void VerifyAndAllocatePacketLength(uint16_t packet_length); bool FindSeqNum(uint16_t sequence_number, int32_t* index) const; + int FindBestFittingPacket(uint16_t size) const; private: Clock* clock_; @@ -87,7 +91,7 @@ class RTPPacketHistory { std::vector stored_seq_nums_; std::vector stored_lengths_; std::vector stored_times_; - std::vector stored_resend_times_; + std::vector stored_send_times_; std::vector stored_types_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc index ed192ac3798c..1682b7c33872 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc @@ -74,8 +74,8 @@ TEST_F(RtpPacketHistoryTest, NoStoreStatus) { // Packet should not be stored. len = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type)); + EXPECT_FALSE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_, &len, + &time)); } TEST_F(RtpPacketHistoryTest, DontStore) { @@ -89,8 +89,8 @@ TEST_F(RtpPacketHistoryTest, DontStore) { // Packet should not be stored. 
len = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type)); + EXPECT_FALSE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_, &len, + &time)); } TEST_F(RtpPacketHistoryTest, PutRtpPacket_TooLargePacketLength) { @@ -112,17 +112,16 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) { capture_time_ms, kAllowRetransmission)); uint16_t len_out = len - 1; int64_t time; - StorageType type; - EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len_out, &time, - &type)); + EXPECT_FALSE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_, + &len_out, &time)); } TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) { hist_->SetStorePacketsStatus(true, 10); uint16_t len = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_FALSE(hist_->GetRTPPacket(0, 0, packet_, &len, &time, &type)); + EXPECT_FALSE(hist_->GetPacketAndSetSendTime(0, 0, false, packet_, &len, + &time)); } TEST_F(RtpPacketHistoryTest, PutRtpPacket) { @@ -147,11 +146,9 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket) { uint16_t len_out = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time, - &type)); + EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_out_, + &len_out, &time)); EXPECT_EQ(len, len_out); - EXPECT_EQ(kAllowRetransmission, type); EXPECT_EQ(capture_time_ms, time); for (int i = 0; i < len; i++) { EXPECT_EQ(packet_[i], packet_out_[i]); @@ -176,11 +173,9 @@ TEST_F(RtpPacketHistoryTest, ReplaceRtpHeader) { uint16_t len_out = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time, - &type)); + EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_out_, + &len_out, &time)); EXPECT_EQ(len, len_out); - EXPECT_EQ(kAllowRetransmission, type); EXPECT_EQ(capture_time_ms, time); for (int i = 0; i < len; i++) { EXPECT_EQ(packet_[i], packet_out_[i]); @@ -207,11 +202,9 @@ TEST_F(RtpPacketHistoryTest, NoCaptureTime) { uint16_t len_out = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time, - &type)); + EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_out_, + &len_out, &time)); EXPECT_EQ(len, len_out); - EXPECT_EQ(kAllowRetransmission, type); EXPECT_EQ(capture_time_ms, time); for (int i = 0; i < len; i++) { EXPECT_EQ(packet_[i], packet_out_[i]); @@ -228,11 +221,9 @@ TEST_F(RtpPacketHistoryTest, DontRetransmit) { uint16_t len_out = kMaxPacketLength; int64_t time; - StorageType type; - EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time, - &type)); + EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_out_, + &len_out, &time)); EXPECT_EQ(len, len_out); - EXPECT_EQ(kDontRetransmit, type); EXPECT_EQ(capture_time_ms, time); } @@ -244,20 +235,21 @@ TEST_F(RtpPacketHistoryTest, MinResendTime) { EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength, capture_time_ms, kAllowRetransmission)); - hist_->UpdateResendTime(kSeqNum); + int64_t time; + EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 100, false, packet_, &len, + &time)); fake_clock_.AdvanceTimeMilliseconds(100); // Time has elapsed. 
len = kMaxPacketLength; - StorageType type; - int64_t time; - EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 100, packet_, &len, &time, &type)); + EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 100, false, packet_, &len, + &time)); EXPECT_GT(len, 0); EXPECT_EQ(capture_time_ms, time); // Time has not elapsed. Packet should be found, but no bytes copied. len = kMaxPacketLength; - EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 101, packet_, &len, &time, &type)); - EXPECT_EQ(0, len); + EXPECT_FALSE(hist_->GetPacketAndSetSendTime(kSeqNum, 101, false, packet_, + &len, &time)); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc index 8ef10741f128..96fa80ad8425 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc @@ -38,7 +38,7 @@ class RtpPayloadRegistryTest : public ::testing::Test { protected: ModuleRTPUtility::Payload* ExpectReturnOfTypicalAudioPayload( - uint8_t payload_type, int rate) { + uint8_t payload_type, uint32_t rate) { bool audio = true; ModuleRTPUtility::Payload returned_payload = { "name", audio, { // Initialize the audio struct in this case. diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc index 04cfe1440759..134548518ea7 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc @@ -198,12 +198,10 @@ int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header, rtp_header->type.Audio.arrOfEnergy, rtp_header->type.Audio.numEnergy); } - const uint16_t payload_data_length = payload_length - - rtp_header->header.paddingLength; return ParseAudioCodecSpecific(rtp_header, payload, - payload_data_length, + payload_length, specific_payload.Audio, is_red); } diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi index f2cfcacc7ea7..59908396ef66 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi @@ -16,16 +16,6 @@ '<(webrtc_root)/modules/modules.gyp:paced_sender', '<(webrtc_root)/modules/modules.gyp:remote_bitrate_estimator', ], - 'include_dirs': [ - '../interface', - '../../interface', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - '../../interface', - ], - }, 'sources': [ # Common '../interface/fec_receiver.h', @@ -37,6 +27,7 @@ '../interface/rtp_rtcp_defines.h', 'bitrate.cc', 'bitrate.h', + 'byte_io.h', 'fec_receiver_impl.cc', 'fec_receiver_impl.h', 'receive_statistics_impl.cc', diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index 9228210af1e6..9f1c0d832552 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -22,13 +22,6 @@ extern MatlabEngine eng; // Global variable defined elsewhere. #endif -// Local for this file. -namespace { - -const float kFracMs = 4.294967296E6f; - -} // namespace - #ifdef _WIN32 // Disable warning C4355: 'this' : used in base member initializer list. 
#pragma warning(disable : 4355) @@ -46,7 +39,7 @@ RtpRtcp::Configuration::Configuration() rtcp_feedback(NULL), intra_frame_callback(NULL), bandwidth_callback(NULL), - rtt_observer(NULL), + rtt_stats(NULL), audio_messages(NullObjectRtpAudioFeedback()), remote_bitrate_estimator(NULL), paced_sender(NULL) { @@ -99,7 +92,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration) #ifdef MATLAB , plot1_(NULL), #endif - rtt_observer_(configuration.rtt_observer) { + rtt_stats_(configuration.rtt_stats), + critical_section_rtt_(CriticalSectionWrapper::CreateCriticalSection()), + rtt_ms_(0) { send_video_codec_.codecType = kVideoCodecUnknown; if (default_module_) { @@ -196,20 +191,13 @@ int32_t ModuleRtpRtcpImpl::Process() { last_bitrate_process_time_ = now; } - bool default_instance = false; - { - CriticalSectionScoped cs(critical_section_module_ptrs_.get()); - if (!child_modules_.empty()) - default_instance = true; - } - if (!default_instance) { + if (!IsDefaultModule()) { + bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs; if (rtcp_sender_.Sending()) { // Process RTT if we have received a receiver report and we haven't // processed RTT for at least |kRtpRtcpRttProcessTimeMs| milliseconds. if (rtcp_receiver_.LastReceivedReceiverReport() > - last_rtt_process_time_ && now >= last_rtt_process_time_ + - kRtpRtcpRttProcessTimeMs) { - last_rtt_process_time_ = now; + last_rtt_process_time_ && process_rtt) { std::vector receive_blocks; rtcp_receiver_.StatisticsReceived(&receive_blocks); uint16_t max_rtt = 0; @@ -220,8 +208,8 @@ int32_t ModuleRtpRtcpImpl::Process() { max_rtt = (rtt > max_rtt) ? rtt : max_rtt; } // Report the rtt. - if (rtt_observer_ && max_rtt != 0) - rtt_observer_->OnRttUpdate(max_rtt); + if (rtt_stats_ && max_rtt != 0) + rtt_stats_->OnRttUpdate(max_rtt); } // Verify receiver reports are delivered and the reported sequence number @@ -244,7 +232,24 @@ int32_t ModuleRtpRtcpImpl::Process() { rtcp_sender_.SetTargetBitrate(target_bitrate); } } + } else { + // Report rtt from receiver. + if (process_rtt) { + uint16_t rtt_ms; + if (rtt_stats_ && rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)) { + rtt_stats_->OnRttUpdate(rtt_ms); + } + } } + + // Get processed rtt. + if (process_rtt) { + last_rtt_process_time_ = now; + if (rtt_stats_) { + set_rtt_ms(rtt_stats_->LastProcessedRtt()); + } + } + if (rtcp_sender_.TimeToSendRTCPReport()) { RTCPSender::FeedbackState feedback_state(this); rtcp_sender_.SendRTCP(feedback_state, kRtcpReport); @@ -258,15 +263,13 @@ int32_t ModuleRtpRtcpImpl::Process() { return 0; } -int32_t ModuleRtpRtcpImpl::SetRTXSendStatus(RtxMode mode, bool set_ssrc, +int32_t ModuleRtpRtcpImpl::SetRTXSendStatus(int mode, bool set_ssrc, uint32_t ssrc) { rtp_sender_.SetRTXStatus(mode, set_ssrc, ssrc); - - return 0; } -int32_t ModuleRtpRtcpImpl::RTXSendStatus(RtxMode* mode, uint32_t* ssrc, +int32_t ModuleRtpRtcpImpl::RTXSendStatus(int* mode, uint32_t* ssrc, int* payload_type) const { rtp_sender_.RTXStatus(mode, ssrc, payload_type); return 0; @@ -341,7 +344,12 @@ int32_t ModuleRtpRtcpImpl::RegisterSendPayload( video_codec.plType); send_video_codec_ = video_codec; - simulcast_ = (video_codec.numberOfSimulcastStreams > 1) ? true : false; + { + // simulcast_ is accessed when accessing child_modules_, so this write needs + // to be protected by the same lock. 
+ CriticalSectionScoped lock(critical_section_module_ptrs_.get()); + simulcast_ = video_codec.numberOfSimulcastStreams > 1; + } return rtp_sender_.RegisterPayload(video_codec.plName, video_codec.plType, 90000, @@ -440,9 +448,7 @@ int32_t ModuleRtpRtcpImpl::SetCSRCs( "SetCSRCs(arr_length:%d)", arr_length); - const bool default_instance(child_modules_.empty() ? false : true); - - if (default_instance) { + if (IsDefaultModule()) { // For default we need to update all child modules too. CriticalSectionScoped lock(critical_section_module_ptrs_.get()); @@ -467,20 +473,17 @@ int32_t ModuleRtpRtcpImpl::SetCSRCs( uint32_t ModuleRtpRtcpImpl::PacketCountSent() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "PacketCountSent()"); - return rtp_sender_.Packets(); } uint32_t ModuleRtpRtcpImpl::ByteCountSent() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ByteCountSent()"); - return rtp_sender_.Bytes(); } int ModuleRtpRtcpImpl::CurrentSendFrequencyHz() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "CurrentSendFrequencyHz()"); - return rtp_sender_.SendPayloadFrequency(); } @@ -523,7 +526,6 @@ int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) { bool ModuleRtpRtcpImpl::Sending() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "Sending()"); - return rtcp_sender_.Sending(); } @@ -542,8 +544,7 @@ int32_t ModuleRtpRtcpImpl::SetSendingMediaStatus(const bool sending) { bool ModuleRtpRtcpImpl::SendingMedia() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "Sending()"); - const bool have_child_modules(child_modules_.empty() ? false : true); - if (!have_child_modules) { + if (!IsDefaultModule()) { return rtp_sender_.SendingMedia(); } @@ -577,8 +578,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData( rtcp_sender_.SetLastRtpTime(time_stamp, capture_time_ms); - const bool have_child_modules(child_modules_.empty() ? false : true); - if (!have_child_modules) { + if (!IsDefaultModule()) { // Don't send RTCP from default module. 
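  // A "default" module is the simulcast aggregator: it keeps a list of child
  // modules, one per simulcast stream, and forwards each SendOutgoingData()
  // call to the child selected by rtp_video_hdr->simulcastIdx, so only the
  // children actually emit RTP and RTCP.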
if (rtcp_sender_.TimeToSendRTCPReport(kVideoFrameKey == frame_type)) { RTCPSender::FeedbackState feedback_state(this); @@ -595,12 +595,12 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData( &(rtp_video_hdr->codecHeader)); } int32_t ret_val = -1; + CriticalSectionScoped lock(critical_section_module_ptrs_.get()); if (simulcast_) { if (rtp_video_hdr == NULL) { return -1; } int idx = 0; - CriticalSectionScoped lock(critical_section_module_ptrs_.get()); std::list::iterator it = child_modules_.begin(); for (; idx < rtp_video_hdr->simulcastIdx; ++it) { if (it == child_modules_.end()) { @@ -633,7 +633,6 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData( fragmentation, rtp_video_hdr); } else { - CriticalSectionScoped lock(critical_section_module_ptrs_.get()); std::list::iterator it = child_modules_.begin(); // Send to all "child" modules while (it != child_modules_.end()) { @@ -655,7 +654,8 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData( bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number, - int64_t capture_time_ms) { + int64_t capture_time_ms, + bool retransmission) { WEBRTC_TRACE( kTraceStream, kTraceRtpRtcp, @@ -663,15 +663,11 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc, "TimeToSendPacket(ssrc:0x%x sequence_number:%u capture_time_ms:%ll)", ssrc, sequence_number, capture_time_ms); - bool no_child_modules = false; - { - CriticalSectionScoped lock(critical_section_module_ptrs_.get()); - no_child_modules = child_modules_.empty(); - } - if (no_child_modules) { + if (!IsDefaultModule()) { // Don't send from default module. if (SendingMedia() && ssrc == rtp_sender_.SSRC()) { - return rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms); + return rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms, + retransmission); } } else { CriticalSectionScoped lock(critical_section_module_ptrs_.get()); @@ -679,7 +675,8 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc, while (it != child_modules_.end()) { if ((*it)->SendingMedia() && ssrc == (*it)->rtp_sender_.SSRC()) { return (*it)->rtp_sender_.TimeToSendPacket(sequence_number, - capture_time_ms); + capture_time_ms, + retransmission); } ++it; } @@ -692,12 +689,7 @@ int ModuleRtpRtcpImpl::TimeToSendPadding(int bytes) { WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "TimeToSendPadding(bytes: %d)", bytes); - bool no_child_modules = false; - { - CriticalSectionScoped lock(critical_section_module_ptrs_.get()); - no_child_modules = child_modules_.empty(); - } - if (no_child_modules) { + if (!IsDefaultModule()) { // Don't send from default module. if (SendingMedia()) { return rtp_sender_.TimeToSendPadding(bytes); @@ -716,9 +708,20 @@ int ModuleRtpRtcpImpl::TimeToSendPadding(int bytes) { return 0; } +bool ModuleRtpRtcpImpl::GetSendSideDelay(int* avg_send_delay_ms, + int* max_send_delay_ms) const { + assert(avg_send_delay_ms); + assert(max_send_delay_ms); + + if (IsDefaultModule()) { + // This API is only supported for child modules. + return false; + } + return rtp_sender_.GetSendSideDelay(avg_send_delay_ms, max_send_delay_ms); +} + uint16_t ModuleRtpRtcpImpl::MaxPayloadLength() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "MaxPayloadLength()"); - return rtp_sender_.MaxPayloadLength(); } @@ -731,8 +734,7 @@ uint16_t ModuleRtpRtcpImpl::MaxDataPayloadLength() const { // Assuming IP/UDP. uint16_t min_data_payload_length = IP_PACKET_SIZE - 28; - const bool default_instance(child_modules_.empty() ? 
false : true); - if (default_instance) { + if (IsDefaultModule()) { // For default we need to update all child modules too. CriticalSectionScoped lock(critical_section_module_ptrs_.get()); std::list::const_iterator it = @@ -801,7 +803,6 @@ int32_t ModuleRtpRtcpImpl::SetTransportOverhead( int32_t ModuleRtpRtcpImpl::SetMaxTransferUnit(const uint16_t mtu) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetMaxTransferUnit(%u)", mtu); - if (mtu > IP_PACKET_SIZE) { WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_, "Invalid in argument to SetMaxTransferUnit(%u)", mtu); @@ -813,7 +814,6 @@ int32_t ModuleRtpRtcpImpl::SetMaxTransferUnit(const uint16_t mtu) { RTCPMethod ModuleRtpRtcpImpl::RTCP() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RTCP()"); - if (rtcp_sender_.Status() != kRtcpOff) { return rtcp_receiver_.Status(); } @@ -824,7 +824,6 @@ RTCPMethod ModuleRtpRtcpImpl::RTCP() const { int32_t ModuleRtpRtcpImpl::SetRTCPStatus(const RTCPMethod method) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRTCPStatus(%d)", method); - if (rtcp_sender_.SetRTCPStatus(method) == 0) { return rtcp_receiver_.SetRTCPStatus(method); } @@ -852,7 +851,6 @@ int32_t ModuleRtpRtcpImpl::AddMixedCNAME( const char c_name[RTCP_CNAME_SIZE]) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "AddMixedCNAME(SSRC:%u)", ssrc); - return rtcp_sender_.AddMixedCNAME(ssrc, c_name); } @@ -867,7 +865,6 @@ int32_t ModuleRtpRtcpImpl::RemoteCNAME( char c_name[RTCP_CNAME_SIZE]) const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteCNAME(SSRC:%u)", remote_ssrc); - return rtcp_receiver_.CNAME(remote_ssrc, c_name); } @@ -878,7 +875,6 @@ int32_t ModuleRtpRtcpImpl::RemoteNTP( uint32_t* rtcp_arrival_time_frac, uint32_t* rtcp_timestamp) const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteNTP()"); - return rtcp_receiver_.NTP(received_ntpsecs, received_ntpfrac, rtcp_arrival_time_secs, @@ -893,7 +889,6 @@ int32_t ModuleRtpRtcpImpl::RTT(const uint32_t remote_ssrc, uint16_t* min_rtt, uint16_t* max_rtt) const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RTT()"); - return rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt); } @@ -901,15 +896,9 @@ int32_t ModuleRtpRtcpImpl::RTT(const uint32_t remote_ssrc, int32_t ModuleRtpRtcpImpl::ResetRTT(const uint32_t remote_ssrc) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ResetRTT(SSRC:%u)", remote_ssrc); - return rtcp_receiver_.ResetRTT(remote_ssrc); } -void ModuleRtpRtcpImpl:: SetRtt(uint32_t rtt) { - WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRtt(rtt: %u)", rtt); - rtcp_receiver_.SetRTT(static_cast(rtt)); -} - int32_t ModuleRtpRtcpImpl::GetReportBlockInfo(const uint32_t remote_ssrc, uint32_t* ntp_high, @@ -927,7 +916,6 @@ ModuleRtpRtcpImpl::GetReportBlockInfo(const uint32_t remote_ssrc, int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ResetSendDataCountersRTP()"); - rtp_sender_.ResetDataCounters(); return 0; // TODO(pwestin): change to void. 
} @@ -949,7 +937,6 @@ int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData( WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRTCPApplicationSpecificData(sub_type:%d name:0x%x)", sub_type, name); - return rtcp_sender_.SetApplicationSpecificData(sub_type, name, data, length); } @@ -961,11 +948,20 @@ int32_t ModuleRtpRtcpImpl::SetRTCPVoIPMetrics( return rtcp_sender_.SetRTCPVoIPMetrics(voip_metric); } +void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, + "SetRtcpXrRrtrStatus(%s)", enable ? "true" : "false"); + return rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable); +} + +bool ModuleRtpRtcpImpl::RtcpXrRrtrStatus() const { + return rtcp_sender_.RtcpXrReceiverReferenceTime(); +} + int32_t ModuleRtpRtcpImpl::DataCountersRTP( uint32_t* bytes_sent, uint32_t* packets_sent) const { WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "DataCountersRTP()"); - if (bytes_sent) { *bytes_sent = rtp_sender_.Bytes(); } @@ -977,7 +973,6 @@ int32_t ModuleRtpRtcpImpl::DataCountersRTP( int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* sender_info) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteRTCPStat()"); - return rtcp_receiver_.SenderInfoReceived(sender_info); } @@ -985,7 +980,6 @@ int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* sender_info) { int32_t ModuleRtpRtcpImpl::RemoteRTCPStat( std::vector* receive_blocks) const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteRTCPStat()"); - return rtcp_receiver_.StatisticsReceived(receive_blocks); } @@ -993,21 +987,18 @@ int32_t ModuleRtpRtcpImpl::AddRTCPReportBlock( const uint32_t ssrc, const RTCPReportBlock* report_block) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "AddRTCPReportBlock()"); - return rtcp_sender_.AddExternalReportBlock(ssrc, report_block); } int32_t ModuleRtpRtcpImpl::RemoveRTCPReportBlock( const uint32_t ssrc) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoveRTCPReportBlock()"); - return rtcp_sender_.RemoveExternalReportBlock(ssrc); } // (REMB) Receiver Estimated Max Bitrate. bool ModuleRtpRtcpImpl::REMB() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "REMB()"); - return rtcp_sender_.REMB(); } @@ -1037,7 +1028,6 @@ int32_t ModuleRtpRtcpImpl::SetREMBData(const uint32_t bitrate, // (IJ) Extended jitter report. bool ModuleRtpRtcpImpl::IJ() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "IJ()"); - return rtcp_sender_.IJ(); } @@ -1046,7 +1036,6 @@ int32_t ModuleRtpRtcpImpl::SetIJStatus(const bool enable) { kTraceRtpRtcp, id_, "SetIJStatus(%s)", enable ? "true" : "false"); - return rtcp_sender_.SetIJStatus(enable); } @@ -1064,7 +1053,6 @@ int32_t ModuleRtpRtcpImpl::DeregisterSendRtpHeaderExtension( // (TMMBR) Temporary Max Media Bit Rate. bool ModuleRtpRtcpImpl::TMMBR() const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "TMMBR()"); - return rtcp_sender_.TMMBR(); } @@ -1081,7 +1069,6 @@ int32_t ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) { int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetTMMBN()"); - uint32_t max_bitrate_kbit = rtp_sender_.MaxConfiguredBitrateVideo() / 1000; return rtcp_sender_.SetTMMBN(bounding_set, max_bitrate_kbit); @@ -1115,10 +1102,13 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list, id_, "SendNACK(size:%u)", size); - uint16_t avg_rtt = 0; - rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &avg_rtt, NULL, NULL); + // Use RTT from RtcpRttStats class if provided. 
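  // rtt_ms() returns the value most recently cached from
  // rtt_stats_->LastProcessedRtt() in Process(); zero means no external RTT
  // source has reported yet, in which case the RTT measured from RTCP
  // receiver reports is used instead.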
+ uint16_t rtt = rtt_ms(); + if (rtt == 0) { + rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL); + } - int64_t wait_time = 5 + ((avg_rtt * 3) >> 1); // 5 + RTT * 1.5. + int64_t wait_time = 5 + ((rtt * 3) >> 1); // 5 + RTT * 1.5. if (wait_time == 5) { wait_time = 100; // During startup we don't have an RTT. } @@ -1181,6 +1171,16 @@ bool ModuleRtpRtcpImpl::StorePackets() const { return rtp_sender_.StorePackets(); } +void ModuleRtpRtcpImpl::RegisterSendChannelRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) { + rtcp_receiver_.RegisterRtcpStatisticsCallback(callback); +} + +RtcpStatisticsCallback* ModuleRtpRtcpImpl:: + GetSendChannelRtcpStatisticsCallback() { + return rtcp_receiver_.GetRtcpStatisticsCallback(); +} + // Send a TelephoneEvent tone using RFC 2833 (4733). int32_t ModuleRtpRtcpImpl::SendTelephoneEventOutband( const uint8_t key, @@ -1189,7 +1189,6 @@ int32_t ModuleRtpRtcpImpl::SendTelephoneEventOutband( WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendTelephoneEventOutband(key:%u, time_ms:%u, level:%u)", key, time_ms, level); - return rtp_sender_.SendTelephoneEvent(key, time_ms, level); } @@ -1200,7 +1199,6 @@ bool ModuleRtpRtcpImpl::SendTelephoneEventActive( kTraceRtpRtcp, id_, "SendTelephoneEventActive()"); - return rtp_sender_.SendTelephoneEventActive(&telephone_event); } @@ -1214,7 +1212,6 @@ int32_t ModuleRtpRtcpImpl::SetAudioPacketSize( id_, "SetAudioPacketSize(%u)", packet_size_samples); - return rtp_sender_.SetAudioPacketSize(packet_size_samples); } @@ -1228,14 +1225,12 @@ int32_t ModuleRtpRtcpImpl::SetRTPAudioLevelIndicationStatus( "SetRTPAudioLevelIndicationStatus(enable=%d, ID=%u)", enable, id); - return rtp_sender_.SetAudioLevelIndicationStatus(enable, id); } int32_t ModuleRtpRtcpImpl::GetRTPAudioLevelIndicationStatus( bool& enable, uint8_t& id) const { - WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, @@ -1261,7 +1256,6 @@ int32_t ModuleRtpRtcpImpl::SetSendREDPayloadType( id_, "SetSendREDPayloadType(%d)", payload_type); - return rtp_sender_.SetRED(payload_type); } @@ -1269,7 +1263,6 @@ int32_t ModuleRtpRtcpImpl::SetSendREDPayloadType( int32_t ModuleRtpRtcpImpl::SendREDPayloadType( int8_t& payload_type) const { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendREDPayloadType()"); - return rtp_sender_.RED(&payload_type); } @@ -1281,9 +1274,7 @@ void ModuleRtpRtcpImpl::SetTargetSendBitrate( const std::vector& stream_bitrates) { WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetTargetSendBitrate: %ld streams", stream_bitrates.size()); - - const bool have_child_modules(child_modules_.empty() ? 
false : true); - if (have_child_modules) { + if (IsDefaultModule()) { CriticalSectionScoped lock(critical_section_module_ptrs_.get()); if (simulcast_) { std::list::iterator it = child_modules_.begin(); @@ -1296,7 +1287,8 @@ void ModuleRtpRtcpImpl::SetTargetSendBitrate( } } } else { - assert(stream_bitrates.size() == 1); + if (stream_bitrates.size() > 1) + return; std::list::iterator it = child_modules_.begin(); for (; it != child_modules_.end(); ++it) { RTPSender& rtp_sender = (*it)->rtp_sender_; @@ -1304,7 +1296,8 @@ void ModuleRtpRtcpImpl::SetTargetSendBitrate( } } } else { - assert(stream_bitrates.size() == 1); + if (stream_bitrates.size() > 1) + return; rtp_sender_.SetTargetSendBitrate(stream_bitrates[0]); } } @@ -1316,7 +1309,6 @@ int32_t ModuleRtpRtcpImpl::SetKeyFrameRequestMethod( id_, "SetKeyFrameRequestMethod(method:%u)", method); - key_frame_req_method_ = method; return 0; } @@ -1326,7 +1318,6 @@ int32_t ModuleRtpRtcpImpl::RequestKeyFrame() { kTraceRtpRtcp, id_, "RequestKeyFrame"); - switch (key_frame_req_method_) { case kKeyFrameReqFirRtp: return rtp_sender_.SendRTPIntraRequest(); @@ -1345,7 +1336,6 @@ int32_t ModuleRtpRtcpImpl::SendRTCPSliceLossIndication( id_, "SendRTCPSliceLossIndication (picture_id:%d)", picture_id); - RTCPSender::FeedbackState feedback_state(this); return rtcp_sender_.SendRTCP( feedback_state, kRtcpSli, 0, 0, false, picture_id); @@ -1357,11 +1347,8 @@ int32_t ModuleRtpRtcpImpl::SetCameraDelay(const int32_t delay_ms) { id_, "SetCameraDelay(%d)", delay_ms); - const bool default_instance(child_modules_.empty() ? false : true); - - if (default_instance) { + if (IsDefaultModule()) { CriticalSectionScoped lock(critical_section_module_ptrs_.get()); - std::list::iterator it = child_modules_.begin(); while (it != child_modules_.end()) { RtpRtcp* module = *it; @@ -1400,12 +1387,10 @@ int32_t ModuleRtpRtcpImpl::GenericFECStatus( bool& enable, uint8_t& payload_type_red, uint8_t& payload_type_fec) { - WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "GenericFECStatus()"); bool child_enabled = false; - const bool default_instance(child_modules_.empty() ? false : true); - if (default_instance) { + if (IsDefaultModule()) { // For default we need to check all child modules too. CriticalSectionScoped lock(critical_section_module_ptrs_.get()); std::list::iterator it = child_modules_.begin(); @@ -1438,8 +1423,7 @@ int32_t ModuleRtpRtcpImpl::GenericFECStatus( int32_t ModuleRtpRtcpImpl::SetFecParameters( const FecProtectionParams* delta_params, const FecProtectionParams* key_params) { - const bool default_instance(child_modules_.empty() ? false : true); - if (default_instance) { + if (IsDefaultModule()) { // For default we need to update all child modules too. CriticalSectionScoped lock(critical_section_module_ptrs_.get()); @@ -1484,9 +1468,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate, uint32_t* video_rate, uint32_t* fec_rate, uint32_t* nack_rate) const { - const bool default_instance(child_modules_.empty() ? false : true); - - if (default_instance) { + if (IsDefaultModule()) { // For default we need to update the send bitrate. 
CriticalSectionScoped lock(critical_section_module_ptrs_feedback_.get()); @@ -1526,7 +1508,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate, return; } if (total_rate != NULL) - *total_rate = rtp_sender_.BitrateLast(); + *total_rate = rtp_sender_.BitrateSent(); if (video_rate != NULL) *video_rate = rtp_sender_.VideoBitrateSent(); if (fec_rate != NULL) @@ -1535,7 +1517,16 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate, *nack_rate = rtp_sender_.NackOverheadRate(); } -// Bad state of RTP receiver request a keyframe. +void ModuleRtpRtcpImpl::RegisterVideoBitrateObserver( + BitrateStatisticsObserver* observer) { + assert(!IsDefaultModule()); + rtp_sender_.RegisterBitrateObserver(observer); +} + +BitrateStatisticsObserver* ModuleRtpRtcpImpl::GetVideoBitrateObserver() const { + return rtp_sender_.GetBitrateObserver(); +} + void ModuleRtpRtcpImpl::OnRequestIntraFrame() { RequestKeyFrame(); } @@ -1561,15 +1552,23 @@ bool ModuleRtpRtcpImpl::GetSendReportMetadata(const uint32_t send_report, octet_count); } +bool ModuleRtpRtcpImpl::SendTimeOfXrRrReport( + uint32_t mid_ntp, int64_t* time_ms) const { + return rtcp_sender_.SendTimeOfXrRrReport(mid_ntp, time_ms); +} + void ModuleRtpRtcpImpl::OnReceivedNACK( const std::list& nack_sequence_numbers) { if (!rtp_sender_.StorePackets() || nack_sequence_numbers.size() == 0) { return; } - uint16_t avg_rtt = 0; - rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &avg_rtt, NULL, NULL); - rtp_sender_.OnReceivedNACK(nack_sequence_numbers, avg_rtt); + // Use RTT from RtcpRttStats class if provided. + uint16_t rtt = rtt_ms(); + if (rtt == 0) { + rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL); + } + rtp_sender_.OnReceivedNACK(nack_sequence_numbers, rtt); } int32_t ModuleRtpRtcpImpl::LastReceivedNTP( @@ -1591,6 +1590,11 @@ int32_t ModuleRtpRtcpImpl::LastReceivedNTP( return 0; } +bool ModuleRtpRtcpImpl::LastReceivedXrReferenceTimeInfo( + RtcpReceiveTimeInfo* info) const { + return rtcp_receiver_.LastReceivedXrReferenceTimeInfo(info); +} + bool ModuleRtpRtcpImpl::UpdateRTCPReceiveInformationTimers() { // If this returns true this channel has timed out. // Periodically check if this is true and if so call UpdateTMMBR. 
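Several hunks in this file replace the old RtcpRttObserver with RtcpRttStats, which acts as both a sink (OnRttUpdate) and a source (LastProcessedRtt), letting the module consume a filtered RTT instead of raw per-report samples. A minimal embedder-side sketch, assuming simple exponential smoothing as the policy (the class name and the 7/8 weight are illustrative, not part of this change):

    class SmoothedRttStats : public webrtc::RtcpRttStats {
     public:
      SmoothedRttStats() : rtt_ms_(0) {}
      virtual void OnRttUpdate(uint32_t rtt_ms) {
        // Seed with the first sample, then smooth the following ones.
        rtt_ms_ = (rtt_ms_ == 0) ? rtt_ms : (7 * rtt_ms_ + rtt_ms) / 8;
      }
      virtual uint32_t LastProcessedRtt() const { return rtt_ms_; }
     private:
      uint32_t rtt_ms_;
    };

An instance is handed to the module through RtpRtcp::Configuration::rtt_stats, exactly as the new unit test below does with its RtcpRttStatsTestImpl.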
@@ -1613,7 +1617,7 @@ int64_t ModuleRtpRtcpImpl::RtcpReportInterval() { void ModuleRtpRtcpImpl::SetRtcpReceiverSsrcs(uint32_t main_ssrc) { std::set ssrcs; ssrcs.insert(main_ssrc); - RtxMode rtx_mode = kRtxOff; + int rtx_mode = kRtxOff; uint32_t rtx_ssrc = 0; int rtx_payload_type = 0; rtp_sender_.RTXStatus(&rtx_mode, &rtx_ssrc, &rtx_payload_type); @@ -1622,4 +1626,38 @@ void ModuleRtpRtcpImpl::SetRtcpReceiverSsrcs(uint32_t main_ssrc) { rtcp_receiver_.SetSsrcs(main_ssrc, ssrcs); } +void ModuleRtpRtcpImpl::set_rtt_ms(uint32_t rtt_ms) { + CriticalSectionScoped cs(critical_section_rtt_.get()); + rtt_ms_ = rtt_ms; +} + +uint32_t ModuleRtpRtcpImpl::rtt_ms() const { + CriticalSectionScoped cs(critical_section_rtt_.get()); + return rtt_ms_; +} + +void ModuleRtpRtcpImpl::RegisterSendChannelRtpStatisticsCallback( + StreamDataCountersCallback* callback) { + rtp_sender_.RegisterRtpStatisticsCallback(callback); +} + +StreamDataCountersCallback* + ModuleRtpRtcpImpl::GetSendChannelRtpStatisticsCallback() const { + return rtp_sender_.GetRtpStatisticsCallback(); +} + +void ModuleRtpRtcpImpl::RegisterSendFrameCountObserver( + FrameCountObserver* observer) { + rtp_sender_.RegisterFrameCountObserver(observer); +} + +FrameCountObserver* ModuleRtpRtcpImpl::GetSendFrameCountObserver() const { + return rtp_sender_.GetFrameCountObserver(); +} + +bool ModuleRtpRtcpImpl::IsDefaultModule() const { + CriticalSectionScoped cs(critical_section_module_ptrs_.get()); + return !child_modules_.empty(); +} + } // Namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h index 2e45f3216025..ea6aaaa3b829 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -19,6 +19,7 @@ #include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h" #include "webrtc/modules/rtp_rtcp/source/rtp_sender.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/testsupport/gtest_prod_util.h" #ifdef MATLAB class MatlabPlot; @@ -94,11 +95,11 @@ class ModuleRtpRtcpImpl : public RtpRtcp { virtual uint32_t ByteCountSent() const; - virtual int32_t SetRTXSendStatus(const RtxMode mode, + virtual int32_t SetRTXSendStatus(const int mode, const bool set_ssrc, const uint32_t ssrc) OVERRIDE; - virtual int32_t RTXSendStatus(RtxMode* mode, uint32_t* ssrc, + virtual int32_t RTXSendStatus(int* mode, uint32_t* ssrc, int* payloadType) const OVERRIDE; @@ -126,11 +127,17 @@ class ModuleRtpRtcpImpl : public RtpRtcp { const RTPFragmentationHeader* fragmentation = NULL, const RTPVideoHeader* rtp_video_hdr = NULL) OVERRIDE; - virtual bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number, - int64_t capture_time_ms) OVERRIDE; + virtual bool TimeToSendPacket(uint32_t ssrc, + uint16_t sequence_number, + int64_t capture_time_ms, + bool retransmission) OVERRIDE; // Returns the number of padding bytes actually sent, which can be more or // less than |bytes|. virtual int TimeToSendPadding(int bytes) OVERRIDE; + + virtual bool GetSendSideDelay(int* avg_send_delay_ms, + int* max_send_delay_ms) const OVERRIDE; + // RTCP part. // Get RTCP status. @@ -171,8 +178,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp { // Reset RoundTripTime statistics. 
virtual int32_t ResetRTT(const uint32_t remote_ssrc) OVERRIDE; - virtual void SetRtt(uint32_t rtt) OVERRIDE; - virtual int32_t GetReportBlockInfo(const uint32_t remote_ssrc, uint32_t* ntp_high, uint32_t* ntp_low, @@ -251,6 +256,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp { virtual bool StorePackets() const OVERRIDE; + // Called on receipt of RTCP report block from remote side. + virtual void RegisterSendChannelRtcpStatisticsCallback( + RtcpStatisticsCallback* callback) OVERRIDE; + virtual RtcpStatisticsCallback* + GetSendChannelRtcpStatisticsCallback() OVERRIDE; + // (APP) Application specific data. virtual int32_t SetRTCPApplicationSpecificData( const uint8_t sub_type, @@ -261,6 +272,11 @@ class ModuleRtpRtcpImpl : public RtpRtcp { // (XR) VOIP metric. virtual int32_t SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric) OVERRIDE; + // (XR) Receiver reference time report. + virtual void SetRtcpXrRrtrStatus(bool enable) OVERRIDE; + + virtual bool RtcpXrRrtrStatus() const OVERRIDE; + // Audio part. // Set audio packet size, used to determine when it's time to send a DTMF @@ -330,6 +346,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp { uint32_t& NTPfrac, uint32_t& remote_sr); + virtual bool LastReceivedXrReferenceTimeInfo(RtcpReceiveTimeInfo* info) const; + virtual int32_t BoundingSet(bool& tmmbr_owner, TMMBRSet*& bounding_set_rec); virtual void BitrateSent(uint32_t* total_rate, @@ -337,15 +355,27 @@ class ModuleRtpRtcpImpl : public RtpRtcp { uint32_t* fec_rate, uint32_t* nackRate) const OVERRIDE; + virtual void RegisterVideoBitrateObserver(BitrateStatisticsObserver* observer) + OVERRIDE; + + virtual BitrateStatisticsObserver* GetVideoBitrateObserver() const OVERRIDE; + virtual bool GetSendReportMetadata(const uint32_t send_report, uint32_t *time_of_send, uint32_t *packet_count, uint64_t *octet_count); + virtual bool SendTimeOfXrRrReport(uint32_t mid_ntp, int64_t* time_ms) const; + // Good state of RTP receiver inform sender. virtual int32_t SendRTCPReferencePictureSelection( const uint64_t picture_id) OVERRIDE; + virtual void RegisterSendChannelRtpStatisticsCallback( + StreamDataCountersCallback* callback); + virtual StreamDataCountersCallback* + GetSendChannelRtpStatisticsCallback() const; + void OnReceivedTMMBR(); // Bad state of RTP receiver request a keyframe. @@ -362,6 +392,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp { void OnRequestSendReport(); + virtual void RegisterSendFrameCountObserver( + FrameCountObserver* observer) OVERRIDE; + virtual FrameCountObserver* GetSendFrameCountObserver() const OVERRIDE; + protected: void RegisterChildModule(RtpRtcp* module); @@ -385,9 +419,16 @@ class ModuleRtpRtcpImpl : public RtpRtcp { Clock* clock_; private: + FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, Rtt); + FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly); int64_t RtcpReportInterval(); void SetRtcpReceiverSsrcs(uint32_t main_ssrc); + void set_rtt_ms(uint32_t rtt_ms); + uint32_t rtt_ms() const; + + bool IsDefaultModule() const; + int32_t id_; const bool audio_; bool collision_detected_; @@ -416,7 +457,11 @@ class ModuleRtpRtcpImpl : public RtpRtcp { MatlabPlot* plot1_; #endif - RtcpRttObserver* rtt_observer_; + RtcpRttStats* rtt_stats_; + + // The processed RTT from RtcpRttStats. 
+ scoped_ptr<CriticalSectionWrapper> critical_section_rtt_; + uint32_t rtt_ms_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc new file mode 100644 index 000000000000..0f288548342f --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc @@ -0,0 +1,175 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "testing/gmock/include/gmock/gmock.h" +#include "testing/gtest/include/gtest/gtest.h" + +#include "webrtc/common_types.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" +#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h" + +namespace webrtc { +namespace { +const uint32_t kSenderSsrc = 0x12345; +const uint32_t kReceiverSsrc = 0x23456; +const uint32_t kOneWayNetworkDelayMs = 100; + +class RtcpRttStatsTestImpl : public RtcpRttStats { + public: + RtcpRttStatsTestImpl() : rtt_ms_(0) {} + virtual ~RtcpRttStatsTestImpl() {} + + virtual void OnRttUpdate(uint32_t rtt_ms) { + rtt_ms_ = rtt_ms; + } + virtual uint32_t LastProcessedRtt() const { + return rtt_ms_; + } + uint32_t rtt_ms_; +}; + +class SendTransport : public Transport, + public NullRtpData { + public: + SendTransport() : receiver_(NULL), clock_(NULL), delay_ms_(0) {} + + void SetRtpRtcpModule(ModuleRtpRtcpImpl* receiver) { + receiver_ = receiver; + } + void SimulateNetworkDelay(uint32_t delay_ms, SimulatedClock* clock) { + clock_ = clock; + delay_ms_ = delay_ms; + } + virtual int SendPacket(int /*ch*/, const void* /*data*/, int /*len*/) { + return -1; + } + virtual int SendRTCPPacket(int /*ch*/, const void *data, int len) { + if (clock_) { + clock_->AdvanceTimeMilliseconds(delay_ms_); + } + EXPECT_TRUE(receiver_ != NULL); + EXPECT_EQ(0, receiver_->IncomingRtcpPacket( + static_cast<const uint8_t*>(data), len)); + return len; + } + ModuleRtpRtcpImpl* receiver_; + SimulatedClock* clock_; + uint32_t delay_ms_; +}; + +class RtpRtcpModule { + public: + RtpRtcpModule(SimulatedClock* clock) + : receive_statistics_(ReceiveStatistics::Create(clock)) { + RtpRtcp::Configuration config; + config.audio = false; + config.clock = clock; + config.outgoing_transport = &transport_; + config.receive_statistics = receive_statistics_.get(); + config.rtt_stats = &rtt_stats_; + + impl_.reset(new ModuleRtpRtcpImpl(config)); + EXPECT_EQ(0, impl_->SetRTCPStatus(kRtcpCompound)); + + transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, clock); + } + scoped_ptr<ReceiveStatistics> receive_statistics_; + SendTransport transport_; + RtcpRttStatsTestImpl rtt_stats_; + scoped_ptr<ModuleRtpRtcpImpl> impl_; +}; +} // namespace + +class RtpRtcpImplTest : public ::testing::Test { + protected: + RtpRtcpImplTest() + : clock_(1335900000), + sender_(&clock_), + receiver_(&clock_) { + // Send module. + EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true)); + EXPECT_EQ(0, sender_.impl_->SetSSRC(kSenderSsrc)); + sender_.impl_->SetRemoteSSRC(kReceiverSsrc); + // Receive module. + EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false)); + EXPECT_EQ(0, receiver_.impl_->SetSSRC(kReceiverSsrc)); + receiver_.impl_->SetRemoteSSRC(kSenderSsrc); + // Transport settings.
+ sender_.transport_.SetRtpRtcpModule(receiver_.impl_.get()); + receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get()); + } + SimulatedClock clock_; + RtpRtcpModule sender_; + RtpRtcpModule receiver_; +}; + +TEST_F(RtpRtcpImplTest, Rtt) { + RTPHeader header = {}; + header.timestamp = 1; + header.sequenceNumber = 123; + header.ssrc = kSenderSsrc; + header.headerLength = 12; + receiver_.receive_statistics_->IncomingPacket(header, 100, false); + + // Sender module should send a SR. + EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport)); + + // Receiver module should send a RR with a response to the last received SR. + clock_.AdvanceTimeMilliseconds(1000); + EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport)); + + // Verify RTT. + uint16_t rtt; + uint16_t avg_rtt; + uint16_t min_rtt; + uint16_t max_rtt; + EXPECT_EQ(0, + sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt)); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, rtt); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, avg_rtt); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, min_rtt); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, max_rtt); + + // No RTT from other ssrc. + EXPECT_EQ(-1, + sender_.impl_->RTT(kReceiverSsrc+1, &rtt, &avg_rtt, &min_rtt, &max_rtt)); + + // Verify RTT from rtt_stats config. + EXPECT_EQ(0U, sender_.rtt_stats_.LastProcessedRtt()); + EXPECT_EQ(0U, sender_.impl_->rtt_ms()); + sender_.impl_->Process(); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt()); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms()); +} + +TEST_F(RtpRtcpImplTest, SetRtcpXrRrtrStatus) { + EXPECT_FALSE(receiver_.impl_->RtcpXrRrtrStatus()); + receiver_.impl_->SetRtcpXrRrtrStatus(true); + EXPECT_TRUE(receiver_.impl_->RtcpXrRrtrStatus()); +} + +TEST_F(RtpRtcpImplTest, RttForReceiverOnly) { + receiver_.impl_->SetRtcpXrRrtrStatus(true); + + // Receiver module should send a Receiver time reference report (RTRR). + EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport)); + + // Sender module should send a response to the last received RTRR (DLRR). + clock_.AdvanceTimeMilliseconds(1000); + EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport)); + + // Verify RTT. + EXPECT_EQ(0U, receiver_.rtt_stats_.LastProcessedRtt()); + EXPECT_EQ(0U, receiver_.impl_->rtt_ms()); + receiver_.impl_->Process(); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.rtt_stats_.LastProcessedRtt()); + EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms()); +} } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.cc index d4980f2eade9..0711356e7a6a 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.cc @@ -12,7 +12,6 @@ #include <stdlib.h> // srand -#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h" #include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h" #include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" @@ -23,6 +22,7 @@ namespace webrtc { // Max in the RFC 3550 is 255 bytes, we limit it to be modulus 32 for SRTP.
const int kMaxPaddingLength = 224; +const int kSendSideDelayWindowMs = 1000; namespace { @@ -33,36 +33,62 @@ const char* FrameTypeToString(const FrameType frame_type) { case kAudioFrameCN: return "audio_cn"; case kVideoFrameKey: return "video_key"; case kVideoFrameDelta: return "video_delta"; - case kVideoFrameGolden: return "video_golden"; - case kVideoFrameAltRef: return "video_altref"; } return ""; } } // namespace -RTPSender::RTPSender(const int32_t id, const bool audio, Clock *clock, - Transport *transport, RtpAudioFeedback *audio_feedback, - PacedSender *paced_sender) - : Bitrate(clock), id_(id), audio_configured_(audio), audio_(NULL), - video_(NULL), paced_sender_(paced_sender), +RTPSender::RTPSender(const int32_t id, + const bool audio, + Clock* clock, + Transport* transport, + RtpAudioFeedback* audio_feedback, + PacedSender* paced_sender) + : clock_(clock), + bitrate_sent_(clock, this), + id_(id), + audio_configured_(audio), + audio_(NULL), + video_(NULL), + paced_sender_(paced_sender), send_critsect_(CriticalSectionWrapper::CreateCriticalSection()), - transport_(transport), sending_media_(true), // Default to sending media. - max_payload_length_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP. - target_send_bitrate_(0), packet_over_head_(28), payload_type_(-1), - payload_type_map_(), rtp_header_extension_map_(), - transmission_time_offset_(0), absolute_send_time_(0), + transport_(transport), + sending_media_(true), // Default to sending media. + max_payload_length_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP. + target_send_bitrate_(0), + packet_over_head_(28), + payload_type_(-1), + payload_type_map_(), + rtp_header_extension_map_(), + transmission_time_offset_(0), + absolute_send_time_(0), // NACK. - nack_byte_count_times_(), nack_byte_count_(), nack_bitrate_(clock), - packet_history_(new RTPPacketHistory(clock)), + nack_byte_count_times_(), + nack_byte_count_(), + nack_bitrate_(clock, NULL), + packet_history_(clock), // Statistics statistics_crit_(CriticalSectionWrapper::CreateCriticalSection()), - packets_sent_(0), payload_bytes_sent_(0), start_time_stamp_forced_(false), - start_time_stamp_(0), ssrc_db_(*SSRCDatabase::GetSSRCDatabase()), - remote_ssrc_(0), sequence_number_forced_(false), ssrc_forced_(false), - timestamp_(0), capture_time_ms_(0), last_packet_marker_bit_(false), - num_csrcs_(0), csrcs_(), include_csrcs_(true), - rtx_(kRtxOff), payload_type_rtx_(-1) { + frame_count_observer_(NULL), + rtp_stats_callback_(NULL), + bitrate_callback_(NULL), + // RTP variables + start_time_stamp_forced_(false), + start_time_stamp_(0), + ssrc_db_(*SSRCDatabase::GetSSRCDatabase()), + remote_ssrc_(0), + sequence_number_forced_(false), + ssrc_forced_(false), + timestamp_(0), + capture_time_ms_(0), + last_timestamp_time_ms_(0), + last_packet_marker_bit_(false), + num_csrcs_(0), + csrcs_(), + include_csrcs_(true), + rtx_(kRtxOff), + payload_type_rtx_(-1) { memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_)); memset(nack_byte_count_, 0, sizeof(nack_byte_count_)); memset(csrcs_, 0, sizeof(csrcs_)); @@ -97,7 +123,6 @@ RTPSender::~RTPSender() { delete it->second; payload_type_map_.erase(it); } - delete packet_history_; delete audio_; delete video_; @@ -109,7 +134,7 @@ void RTPSender::SetTargetSendBitrate(const uint32_t bits) { } uint16_t RTPSender::ActualSendBitrateKbit() const { - return (uint16_t)(Bitrate::BitrateNow() / 1000); + return (uint16_t)(bitrate_sent_.BitrateNow() / 1000); } uint32_t RTPSender::VideoBitrateSent() const { @@ -130,6 +155,23 @@ uint32_t 
RTPSender::NackOverheadRate() const { return nack_bitrate_.BitrateLast(); } +bool RTPSender::GetSendSideDelay(int* avg_send_delay_ms, + int* max_send_delay_ms) const { + CriticalSectionScoped cs(statistics_crit_.get()); + SendDelayMap::const_iterator it = send_delays_.upper_bound( + clock_->TimeInMilliseconds() - kSendSideDelayWindowMs); + if (!sending_media_ || it == send_delays_.end()) + return false; + int num_delays = 0; + for (; it != send_delays_.end(); ++it) { + *max_send_delay_ms = std::max(*max_send_delay_ms, it->second); + *avg_send_delay_ms += it->second; + ++num_delays; + } + *avg_send_delay_ms = (*avg_send_delay_ms + num_delays / 2) / num_delays; + return true; +} + int32_t RTPSender::SetTransmissionTimeOffset( const int32_t transmission_time_offset) { if (transmission_time_offset > (0x800000 - 1) || @@ -259,9 +301,9 @@ uint16_t RTPSender::MaxDataPayloadLength() const { if (audio_configured_) { return max_payload_length_ - RTPHeaderLength(); } else { - return max_payload_length_ - RTPHeaderLength() - - video_->FECPacketOverhead() - ((rtx_) ? 2 : 0); - // Include the FEC/ULP/RED overhead. + return max_payload_length_ - RTPHeaderLength() // RTP overhead. + - video_->FECPacketOverhead() // FEC/ULP/RED overhead. + - ((rtx_) ? 2 : 0); // RTX overhead. } } @@ -271,7 +313,7 @@ uint16_t RTPSender::MaxPayloadLength() const { uint16_t RTPSender::PacketOverHead() const { return packet_over_head_; } -void RTPSender::SetRTXStatus(RtxMode mode, bool set_ssrc, uint32_t ssrc) { +void RTPSender::SetRTXStatus(int mode, bool set_ssrc, uint32_t ssrc) { CriticalSectionScoped cs(send_critsect_); rtx_ = mode; if (rtx_ != kRtxOff) { @@ -283,7 +325,7 @@ void RTPSender::SetRTXStatus(RtxMode mode, bool set_ssrc, uint32_t ssrc) { } } -void RTPSender::RTXStatus(RtxMode* mode, uint32_t* ssrc, +void RTPSender::RTXStatus(int* mode, uint32_t* ssrc, int* payload_type) const { CriticalSectionScoped cs(send_critsect_); *mode = rtx_; @@ -361,14 +403,15 @@ int32_t RTPSender::SendOutgoingData( return -1; } + uint32_t ret_val; if (audio_configured_) { TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp, "Send", "type", FrameTypeToString(frame_type)); assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN || frame_type == kFrameEmpty); - return audio_->SendAudio(frame_type, payload_type, capture_timestamp, - payload_data, payload_size, fragmentation); + ret_val = audio_->SendAudio(frame_type, payload_type, capture_timestamp, + payload_data, payload_size, fragmentation); } else { TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type", FrameTypeToString(frame_type)); @@ -382,11 +425,45 @@ int32_t RTPSender::SendOutgoingData( return SendPaddingAccordingToBitrate(payload_type, capture_timestamp, capture_time_ms) ? 
0 : -1; } - return video_->SendVideo(video_type, frame_type, payload_type, - capture_timestamp, capture_time_ms, payload_data, - payload_size, fragmentation, codec_info, - rtp_type_hdr); + ret_val = video_->SendVideo(video_type, frame_type, payload_type, + capture_timestamp, capture_time_ms, + payload_data, payload_size, + fragmentation, codec_info, + rtp_type_hdr); + } + + CriticalSectionScoped cs(statistics_crit_.get()); + uint32_t frame_count = ++frame_counts_[frame_type]; + if (frame_count_observer_) { + frame_count_observer_->FrameCountUpdated(frame_type, + frame_count, + ssrc_); + } + + return ret_val; +} + +int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) { + if (!(rtx_ & kRtxRedundantPayloads)) + return 0; + uint8_t buffer[IP_PACKET_SIZE]; + int bytes_left = bytes_to_send; + while (bytes_left > 0) { + uint16_t length = bytes_left; + int64_t capture_time_ms; + if (!packet_history_.GetBestFittingPacket(buffer, &length, + &capture_time_ms)) { + break; + } + if (!PrepareAndSendPacket(buffer, length, capture_time_ms, true)) + return -1; + ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length); + RTPHeader rtp_header; + rtp_parser.Parse(rtp_header); + bytes_left -= length - rtp_header.headerLength; + } + return bytes_to_send - bytes_left; } bool RTPSender::SendPaddingAccordingToBitrate( @@ -394,7 +471,7 @@ bool RTPSender::SendPaddingAccordingToBitrate( int64_t capture_time_ms) { // Current bitrate since last estimate(1 second) averaged with the // estimate since then, to get the most up to date bitrate. - uint32_t current_bitrate = BitrateNow(); + uint32_t current_bitrate = bitrate_sent_.BitrateNow(); int bitrate_diff = target_send_bitrate_ * 1000 - current_bitrate; if (bitrate_diff <= 0) { return true; @@ -419,6 +496,7 @@ bool RTPSender::SendPaddingAccordingToBitrate( timestamp = start_time_stamp_ + capture_timestamp; timestamp_ = timestamp; capture_time_ms_ = capture_time_ms; + last_timestamp_time_ms_ = clock_->TimeInMilliseconds(); } int bytes_sent = SendPadData(payload_type, timestamp, capture_time_ms, bytes, kDontRetransmit, false, false); @@ -508,11 +586,11 @@ int RTPSender::SendPadData(int payload_type, uint32_t timestamp, void RTPSender::SetStorePacketsStatus(const bool enable, const uint16_t number_to_store) { - packet_history_->SetStorePacketsStatus(enable, number_to_store); + packet_history_.SetStorePacketsStatus(enable, number_to_store); } bool RTPSender::StorePackets() const { - return packet_history_->StorePackets(); + return packet_history_.StorePackets(); } int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) { @@ -520,40 +598,21 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) { uint8_t data_buffer[IP_PACKET_SIZE]; uint8_t *buffer_to_send_ptr = data_buffer; int64_t capture_time_ms; - StorageType type; - if (!packet_history_->GetRTPPacket(packet_id, min_resend_time, data_buffer, - &length, &capture_time_ms, &type)) { + if (!packet_history_.GetPacketAndSetSendTime(packet_id, min_resend_time, true, + data_buffer, &length, + &capture_time_ms)) { // Packet not found. return 0; } - if (length == 0 || type == kDontRetransmit) { - // No bytes copied (packet recently resent, skip resending) or - // packet should not be retransmitted. 
- return 0; - } - - uint8_t data_buffer_rtx[IP_PACKET_SIZE]; - if (rtx_ != kRtxOff) { - BuildRtxPacket(data_buffer, &length, data_buffer_rtx); - buffer_to_send_ptr = data_buffer_rtx; - } ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length); RTPHeader header; - rtp_parser.Parse(header); - - // Store the time when the packet was last sent or added to pacer. - packet_history_->UpdateResendTime(packet_id); - - { - // Update send statistics prior to pacer. - CriticalSectionScoped lock(statistics_crit_.get()); - Bitrate::Update(length); - ++packets_sent_; - // We on purpose don't add to payload_bytes_sent_ since this is a - // re-transmit and not new payload data. + if (!rtp_parser.Parse(header)) { + assert(false); + WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_, + "Failed to parse RTP header of packet to be retransmitted."); + return -1; } - TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::ReSendPacket", "timestamp", header.timestamp, "seqnum", header.sequenceNumber); @@ -563,14 +622,22 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) { header.ssrc, header.sequenceNumber, capture_time_ms, - length - header.headerLength)) { + length - header.headerLength, + true)) { // We can't send the packet right now. // We will be called when it is time. return length; } } + uint8_t data_buffer_rtx[IP_PACKET_SIZE]; + if ((rtx_ & kRtxRetransmitted) > 0) { + BuildRtxPacket(data_buffer, &length, data_buffer_rtx); + buffer_to_send_ptr = data_buffer_rtx; + } + if (SendPacketToNetwork(buffer_to_send_ptr, length)) { + UpdateRtpStats(buffer_to_send_ptr, length, header, rtx_ != kRtxOff, true); return length; } return -1; @@ -710,43 +777,114 @@ void RTPSender::UpdateNACKBitRate(const uint32_t bytes, // Called from pacer when we can send the packet. bool RTPSender::TimeToSendPacket(uint16_t sequence_number, - int64_t capture_time_ms) { - StorageType type; + int64_t capture_time_ms, + bool retransmission) { uint16_t length = IP_PACKET_SIZE; uint8_t data_buffer[IP_PACKET_SIZE]; int64_t stored_time_ms; - if (packet_history_ == NULL) { + if (!packet_history_.GetPacketAndSetSendTime(sequence_number, + 0, + retransmission, + data_buffer, + &length, + &stored_time_ms)) { // Packet cannot be found. Allow sending to continue. return true; } - if (!packet_history_->GetRTPPacket(sequence_number, 0, data_buffer, &length, - &stored_time_ms, &type)) { - // Packet cannot be found. Allow sending to continue. 
- return true; + if (!retransmission && capture_time_ms > 0) { + UpdateDelayStatistics(capture_time_ms, clock_->TimeInMilliseconds()); } - assert(length > 0); + return PrepareAndSendPacket(data_buffer, length, capture_time_ms, + retransmission && (rtx_ & kRtxRetransmitted) > 0); +} - ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length); +bool RTPSender::PrepareAndSendPacket(uint8_t* buffer, + uint16_t length, + int64_t capture_time_ms, + bool send_over_rtx) { + uint8_t *buffer_to_send_ptr = buffer; + + ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length); RTPHeader rtp_header; rtp_parser.Parse(rtp_header); TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::TimeToSendPacket", "timestamp", rtp_header.timestamp, - "seqnum", sequence_number); + "seqnum", rtp_header.sequenceNumber); + + uint8_t data_buffer_rtx[IP_PACKET_SIZE]; + if (send_over_rtx) { + BuildRtxPacket(buffer, &length, data_buffer_rtx); + buffer_to_send_ptr = data_buffer_rtx; + } int64_t now_ms = clock_->TimeInMilliseconds(); int64_t diff_ms = now_ms - capture_time_ms; bool updated_transmission_time_offset = - UpdateTransmissionTimeOffset(data_buffer, length, rtp_header, diff_ms); + UpdateTransmissionTimeOffset(buffer_to_send_ptr, length, rtp_header, + diff_ms); bool updated_abs_send_time = - UpdateAbsoluteSendTime(data_buffer, length, rtp_header, now_ms); + UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms); if (updated_transmission_time_offset || updated_abs_send_time) { // Update stored packet in case of receiving a re-transmission request. - packet_history_->ReplaceRTPHeader(data_buffer, - rtp_header.sequenceNumber, - rtp_header.headerLength); + packet_history_.ReplaceRTPHeader(buffer_to_send_ptr, + rtp_header.sequenceNumber, + rtp_header.headerLength); } - return SendPacketToNetwork(data_buffer, length); + + bool ret = SendPacketToNetwork(buffer_to_send_ptr, length); + UpdateRtpStats(buffer_to_send_ptr, length, rtp_header, false, false); + return ret; +} + +void RTPSender::UpdateRtpStats(const uint8_t* buffer, + uint32_t size, + const RTPHeader& header, + bool is_rtx, + bool is_retransmit) { + StreamDataCounters* counters; + // Get ssrc before taking statistics_crit_ to avoid possible deadlock. + uint32_t ssrc = SSRC(); + + CriticalSectionScoped lock(statistics_crit_.get()); + if (is_rtx) { + counters = &rtx_rtp_stats_; + ssrc = ssrc_rtx_; + } else { + counters = &rtp_stats_; + } + + bitrate_sent_.Update(size); + ++counters->packets; + if (IsFecPacket(buffer, header)) { + ++counters->fec_packets; + } + + if (is_retransmit) { + ++counters->retransmitted_packets; + } else { + counters->bytes += size - (header.headerLength + header.paddingLength); + counters->header_bytes += header.headerLength; + counters->padding_bytes += header.paddingLength; + } + + if (rtp_stats_callback_) { + rtp_stats_callback_->DataCountersUpdated(*counters, ssrc); + } +} + +bool RTPSender::IsFecPacket(const uint8_t* buffer, + const RTPHeader& header) const { + if (!video_) { + return false; + } + bool fec_enabled; + uint8_t pt_red; + uint8_t pt_fec; + video_->GenericFECStatus(fec_enabled, pt_red, pt_fec); + return fec_enabled && + header.payloadType == pt_red && + buffer[header.headerLength] == pt_fec; } int RTPSender::TimeToSendPadding(int bytes) { @@ -758,12 +896,25 @@ int RTPSender::TimeToSendPadding(int bytes) { uint32_t timestamp; { CriticalSectionScoped cs(send_critsect_); - payload_type = (rtx_ == kRtxOff) ? payload_type_ : payload_type_rtx_; + payload_type = ((rtx_ & kRtxRedundantPayloads) > 0) ? 
payload_type_rtx_ : + payload_type_; timestamp = timestamp_; capture_time_ms = capture_time_ms_; + if (last_timestamp_time_ms_ > 0) { + timestamp += + (clock_->TimeInMilliseconds() - last_timestamp_time_ms_) * 90; + capture_time_ms += + (clock_->TimeInMilliseconds() - last_timestamp_time_ms_); + } } - return SendPadData(payload_type, timestamp, capture_time_ms, bytes, - kDontStore, true, true); + int bytes_sent = SendRedundantPayloads(payload_type, bytes); + bytes -= bytes_sent; + if (bytes > 0) { + int padding_sent = SendPadData(payload_type, timestamp, capture_time_ms, + bytes, kDontStore, true, true); + bytes_sent += padding_sent; + } + return bytes_sent; } // TODO(pwestin): send in the RTPHeaderParser to avoid parsing it again. @@ -790,53 +941,41 @@ int32_t RTPSender::SendToNetwork( rtp_header, now_ms); // Used for NACK and to spread out the transmission of packets. - if (packet_history_->PutRTPPacket(buffer, rtp_header_length + payload_length, - max_payload_length_, capture_time_ms, - storage) != 0) { + if (packet_history_.PutRTPPacket(buffer, rtp_header_length + payload_length, + max_payload_length_, capture_time_ms, + storage) != 0) { return -1; } - // Create and send RTX Packet. - // TODO(pwesin): This should be moved to its own code path triggered by pacer. - bool rtx_sent = false; - if (rtx_ == kRtxAll && storage == kAllowRetransmission) { - uint16_t length_rtx = payload_length + rtp_header_length; - uint8_t data_buffer_rtx[IP_PACKET_SIZE]; - BuildRtxPacket(buffer, &length_rtx, data_buffer_rtx); - if (!SendPacketToNetwork(data_buffer_rtx, length_rtx)) return -1; - rtx_sent = true; - } - { - // Update send statistics prior to pacer. - CriticalSectionScoped lock(statistics_crit_.get()); - Bitrate::Update(payload_length + rtp_header_length); - ++packets_sent_; - payload_bytes_sent_ += payload_length; - if (rtx_sent) { - // The RTX packet. - ++packets_sent_; - payload_bytes_sent_ += payload_length; - } - } - if (paced_sender_ && storage != kDontStore) { if (!paced_sender_->SendPacket(priority, rtp_header.ssrc, rtp_header.sequenceNumber, capture_time_ms, - payload_length)) { + payload_length, false)) { // We can't send the packet right now. // We will be called when it is time. 
return 0; } } - if (SendPacketToNetwork(buffer, payload_length + rtp_header_length)) { - return 0; + if (capture_time_ms > 0) { + UpdateDelayStatistics(capture_time_ms, now_ms); } - return -1; + uint32_t length = payload_length + rtp_header_length; + if (!SendPacketToNetwork(buffer, length)) + return -1; + UpdateRtpStats(buffer, length, rtp_header, false, false); + return 0; +} + +void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) { + CriticalSectionScoped cs(statistics_crit_.get()); + send_delays_[now_ms] = now_ms - capture_time_ms; + send_delays_.erase(send_delays_.begin(), + send_delays_.lower_bound(now_ms - kSendSideDelayWindowMs)); } void RTPSender::ProcessBitrate() { CriticalSectionScoped cs(send_critsect_); - Bitrate::Process(); + bitrate_sent_.Process(); nack_bitrate_.Process(); if (audio_configured_) { return; @@ -860,19 +999,23 @@ uint16_t RTPSender::IncrementSequenceNumber() { void RTPSender::ResetDataCounters() { CriticalSectionScoped lock(statistics_crit_.get()); - packets_sent_ = 0; - payload_bytes_sent_ = 0; + rtp_stats_ = StreamDataCounters(); + rtx_rtp_stats_ = StreamDataCounters(); + if (rtp_stats_callback_) { + rtp_stats_callback_->DataCountersUpdated(rtp_stats_, ssrc_); + rtp_stats_callback_->DataCountersUpdated(rtx_rtp_stats_, ssrc_rtx_); + } } uint32_t RTPSender::Packets() const { CriticalSectionScoped lock(statistics_crit_.get()); - return packets_sent_; + return rtp_stats_.packets + rtx_rtp_stats_.packets; } // Number of sent RTP bytes. uint32_t RTPSender::Bytes() const { CriticalSectionScoped lock(statistics_crit_.get()); - return payload_bytes_sent_; + return rtp_stats_.bytes + rtx_rtp_stats_.bytes; } int RTPSender::CreateRTPHeader( @@ -931,6 +1074,7 @@ int32_t RTPSender::BuildRTPheader( // timing. 
timestamp_++; } + last_timestamp_time_ms_ = clock_->TimeInMilliseconds(); uint32_t sequence_number = sequence_number_++; capture_time_ms_ = capture_time_ms; last_packet_marker_bit_ = marker_bit; @@ -1446,4 +1590,49 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, uint16_t* length, *length += 2; } +void RTPSender::RegisterFrameCountObserver(FrameCountObserver* observer) { + CriticalSectionScoped cs(statistics_crit_.get()); + if (observer != NULL) + assert(frame_count_observer_ == NULL); + frame_count_observer_ = observer; +} + +FrameCountObserver* RTPSender::GetFrameCountObserver() const { + CriticalSectionScoped cs(statistics_crit_.get()); + return frame_count_observer_; +} + +void RTPSender::RegisterRtpStatisticsCallback( + StreamDataCountersCallback* callback) { + CriticalSectionScoped cs(statistics_crit_.get()); + if (callback != NULL) + assert(rtp_stats_callback_ == NULL); + rtp_stats_callback_ = callback; +} + +StreamDataCountersCallback* RTPSender::GetRtpStatisticsCallback() const { + CriticalSectionScoped cs(statistics_crit_.get()); + return rtp_stats_callback_; +} + +void RTPSender::RegisterBitrateObserver(BitrateStatisticsObserver* observer) { + CriticalSectionScoped cs(statistics_crit_.get()); + if (observer != NULL) + assert(bitrate_callback_ == NULL); + bitrate_callback_ = observer; +} + +BitrateStatisticsObserver* RTPSender::GetBitrateObserver() const { + CriticalSectionScoped cs(statistics_crit_.get()); + return bitrate_callback_; +} + +uint32_t RTPSender::BitrateSent() const { return bitrate_sent_.BitrateLast(); } + +void RTPSender::BitrateUpdated(const BitrateStatistics& stats) { + CriticalSectionScoped cs(statistics_crit_.get()); + if (bitrate_callback_) { + bitrate_callback_->Notify(stats, ssrc_); + } +} } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.h index e0ead822259a..e1cc3a182a75 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.h +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.h @@ -21,6 +21,7 @@ #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" #include "webrtc/modules/rtp_rtcp/source/bitrate.h" #include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h" +#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h" #include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "webrtc/modules/rtp_rtcp/source/ssrc_database.h" #include "webrtc/modules/rtp_rtcp/source/video_codec_information.h" @@ -30,7 +31,6 @@ namespace webrtc { class CriticalSectionWrapper; -class RTPPacketHistory; class RTPSenderAudio; class RTPSenderVideo; @@ -63,7 +63,7 @@ class RTPSenderInterface { PacedSender::Priority priority) = 0; }; -class RTPSender : public Bitrate, public RTPSenderInterface { +class RTPSender : public RTPSenderInterface, public Bitrate::Observer { public: RTPSender(const int32_t id, const bool audio, Clock *clock, Transport *transport, RtpAudioFeedback *audio_feedback, @@ -78,6 +78,10 @@ class RTPSender : public Bitrate, public RTPSenderInterface { uint32_t FecOverheadRate() const; uint32_t NackOverheadRate() const; + // Returns true if the statistics have been calculated, and false if no frame + // was sent within the statistics window. 
+ bool GetSendSideDelay(int* avg_send_delay_ms, int* max_send_delay_ms) const; + void SetTargetSendBitrate(const uint32_t bits); virtual uint16_t MaxDataPayloadLength() const @@ -134,10 +138,6 @@ class RTPSender : public Bitrate, public RTPSenderInterface { VideoCodecInformation *codec_info = NULL, const RTPVideoTypeHeader * rtp_type_hdr = NULL); - int BuildPaddingPacket(uint8_t* packet, int header_length, int32_t bytes); - int SendPadData(int payload_type, uint32_t timestamp, int64_t capture_time_ms, - int32_t bytes, StorageType store, - bool force_full_size_packets, bool only_pad_after_markerbit); // RTP header extension int32_t SetTransmissionTimeOffset( const int32_t transmission_time_offset); @@ -167,7 +167,8 @@ class RTPSender : public Bitrate, public RTPSenderInterface { const RTPHeader &rtp_header, const int64_t now_ms) const; - bool TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms); + bool TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms, + bool retransmission); int TimeToSendPadding(int bytes); // NACK. @@ -186,9 +187,9 @@ class RTPSender : public Bitrate, public RTPSenderInterface { bool ProcessNACKBitRate(const uint32_t now); // RTX. - void SetRTXStatus(RtxMode mode, bool set_ssrc, uint32_t ssrc); + void SetRTXStatus(int mode, bool set_ssrc, uint32_t ssrc); - void RTXStatus(RtxMode* mode, uint32_t* ssrc, int* payload_type) const; + void RTXStatus(int* mode, uint32_t* ssrc, int* payload_type) const; void SetRtxPayloadType(int payloadType); @@ -263,11 +264,35 @@ class RTPSender : public Bitrate, public RTPSenderInterface { int32_t SetFecParameters(const FecProtectionParams *delta_params, const FecProtectionParams *key_params); + virtual void RegisterFrameCountObserver(FrameCountObserver* observer); + virtual FrameCountObserver* GetFrameCountObserver() const; + + int SendPadData(int payload_type, uint32_t timestamp, int64_t capture_time_ms, + int32_t bytes, StorageType store, + bool force_full_size_packets, bool only_pad_after_markerbit); + + // Called on update of RTP statistics. + void RegisterRtpStatisticsCallback(StreamDataCountersCallback* callback); + StreamDataCountersCallback* GetRtpStatisticsCallback() const; + + // Called on new send bitrate estimate. + void RegisterBitrateObserver(BitrateStatisticsObserver* observer); + BitrateStatisticsObserver* GetBitrateObserver() const; + + uint32_t BitrateSent() const; + + virtual void BitrateUpdated(const BitrateStatistics& stats) OVERRIDE; + protected: int32_t CheckPayloadType(const int8_t payload_type, RtpVideoCodecTypes *video_type); private: + // Maps capture time in milliseconds to send-side delay in milliseconds. + // Send-side delay is the difference between transmission time and capture + // time. 
+ typedef std::map<int64_t, int> SendDelayMap; + int CreateRTPHeader(uint8_t* header, int8_t payload_type, uint32_t ssrc, bool marker_bit, uint32_t timestamp, uint16_t sequence_number, @@ -275,15 +300,35 @@ void UpdateNACKBitRate(const uint32_t bytes, const uint32_t now); + bool PrepareAndSendPacket(uint8_t* buffer, + uint16_t length, + int64_t capture_time_ms, + bool send_over_rtx); + + int SendRedundantPayloads(int payload_type, int bytes); + bool SendPaddingAccordingToBitrate(int8_t payload_type, uint32_t capture_timestamp, int64_t capture_time_ms); + int BuildPaddingPacket(uint8_t* packet, int header_length, int32_t bytes); void BuildRtxPacket(uint8_t* buffer, uint16_t* length, uint8_t* buffer_rtx); bool SendPacketToNetwork(const uint8_t *packet, uint32_t size); + void UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms); + + void UpdateRtpStats(const uint8_t* buffer, + uint32_t size, + const RTPHeader& header, + bool is_rtx, + bool is_retransmit); + bool IsFecPacket(const uint8_t* buffer, const RTPHeader& header) const; + + Clock* clock_; + Bitrate bitrate_sent_; + int32_t id_; const bool audio_configured_; RTPSenderAudio *audio_; @@ -311,12 +356,17 @@ class RTPSender : public Bitrate, public RTPSenderInterface { int32_t nack_byte_count_[NACK_BYTECOUNT_SIZE]; Bitrate nack_bitrate_; - RTPPacketHistory *packet_history_; + RTPPacketHistory packet_history_; // Statistics scoped_ptr<CriticalSectionWrapper> statistics_crit_; - uint32_t packets_sent_; - uint32_t payload_bytes_sent_; + SendDelayMap send_delays_; + std::map<FrameType, uint32_t> frame_counts_; + FrameCountObserver* frame_count_observer_; + StreamDataCounters rtp_stats_; + StreamDataCounters rtx_rtp_stats_; + StreamDataCountersCallback* rtp_stats_callback_; + BitrateStatisticsObserver* bitrate_callback_; // RTP variables bool start_time_stamp_forced_; @@ -330,11 +380,12 @@ class RTPSender : public Bitrate, public RTPSenderInterface { uint32_t ssrc_; uint32_t timestamp_; int64_t capture_time_ms_; + int64_t last_timestamp_time_ms_; bool last_packet_marker_bit_; uint8_t num_csrcs_; uint32_t csrcs_[kRtpCsrcSize]; bool include_csrcs_; - RtxMode rtx_; + int rtx_; uint32_t ssrc_rtx_; int payload_type_rtx_; }; diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc index 83ee7f68df4a..ce615be04943 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -15,11 +15,13 @@ #include "testing/gtest/include/gtest/gtest.h" #include "webrtc/modules/pacing/include/mock/mock_paced_sender.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" #include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h" #include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h" #include "webrtc/modules/rtp_rtcp/source/rtp_sender.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/mock_transport.h" #include "webrtc/typedefs.h" namespace webrtc { @@ -36,6 +38,7 @@ const uint32_t kAbsoluteSendTime = 0x00aabbcc; const uint8_t kAudioLevel = 0x5a; const uint8_t kAudioLevelExtensionId = 9; const int kAudioPayload = 103; +const uint64_t kStartTime = 123456789; } // namespace using testing::_; @@ -52,6 +55,10 @@ uint16_t GetPayloadDataLength(const RTPHeader& rtp_header, return static_cast<uint16_t>(length); } +uint64_t
ConvertMsToAbsSendTime(int64_t time_ms) { + return 0x00fffffful & ((time_ms << 18) / 1000); +} + class LoopbackTransportTest : public webrtc::Transport { public: LoopbackTransportTest() @@ -75,14 +82,14 @@ class LoopbackTransportTest : public webrtc::Transport { class RtpSenderTest : public ::testing::Test { protected: RtpSenderTest() - : fake_clock_(123456789), - mock_paced_sender_(), - rtp_sender_(), - payload_(kPayload), - transport_(), - kMarkerBit(true) { + : fake_clock_(kStartTime), + mock_paced_sender_(), + rtp_sender_(), + payload_(kPayload), + transport_(), + kMarkerBit(true) { EXPECT_CALL(mock_paced_sender_, - SendPacket(_, _, _, _, _)).WillRepeatedly(testing::Return(true)); + SendPacket(_, _, _, _, _, _)).WillRepeatedly(testing::Return(true)); } virtual void SetUp() { @@ -108,6 +115,23 @@ class RtpSenderTest : public ::testing::Test { EXPECT_EQ(0, rtp_header.numCSRCs); EXPECT_EQ(0, rtp_header.paddingLength); } + + void SendPacket(int64_t capture_time_ms, int payload_length) { + uint32_t timestamp = capture_time_ms * 90; + int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_, + kPayload, + kMarkerBit, + timestamp, + capture_time_ms); + + // Packet should be stored in a send bucket. + EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, + payload_length, + rtp_length, + capture_time_ms, + kAllowRetransmission, + PacedSender::kNormalPriority)); + } }; TEST_F(RtpSenderTest, RegisterRtpTransmissionTimeOffsetHeaderExtension) { @@ -344,7 +368,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) { TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) { EXPECT_CALL(mock_paced_sender_, - SendPacket(PacedSender::kNormalPriority, _, kSeqNum, _, _)). + SendPacket(PacedSender::kNormalPriority, _, kSeqNum, _, _, _)). WillOnce(testing::Return(false)); rtp_sender_->SetStorePacketsStatus(true, 10); @@ -373,7 +397,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) { const int kStoredTimeInMs = 100; fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); - rtp_sender_->TimeToSendPacket(kSeqNum, capture_time_ms); + rtp_sender_->TimeToSendPacket(kSeqNum, capture_time_ms, false); // Process send bucket. Packet should now be sent. EXPECT_EQ(1, transport_.packets_sent_); @@ -392,13 +416,13 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) { // Verify transmission time offset. EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset); uint64_t expected_send_time = - 0x00fffffful & ((fake_clock_.TimeInMilliseconds() << 18) / 1000); + ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); } TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) { EXPECT_CALL(mock_paced_sender_, - SendPacket(PacedSender::kNormalPriority, _, kSeqNum, _, _)). + SendPacket(PacedSender::kNormalPriority, _, kSeqNum, _, _, _)). WillOnce(testing::Return(false)); rtp_sender_->SetStorePacketsStatus(true, 10); @@ -425,7 +449,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) { EXPECT_EQ(0, transport_.packets_sent_); EXPECT_CALL(mock_paced_sender_, - SendPacket(PacedSender::kHighPriority, _, kSeqNum, _, _)). + SendPacket(PacedSender::kHighPriority, _, kSeqNum, _, _, _)). 
WillOnce(testing::Return(false)); const int kStoredTimeInMs = 100; @@ -434,7 +458,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) { EXPECT_EQ(rtp_length, rtp_sender_->ReSendPacket(kSeqNum)); EXPECT_EQ(0, transport_.packets_sent_); - rtp_sender_->TimeToSendPacket(kSeqNum, capture_time_ms); + rtp_sender_->TimeToSendPacket(kSeqNum, capture_time_ms, false); // Process send bucket. Packet should now be sent. EXPECT_EQ(1, transport_.packets_sent_); @@ -454,10 +478,192 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) { // Verify transmission time offset. EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset); uint64_t expected_send_time = - 0x00fffffful & ((fake_clock_.TimeInMilliseconds() << 18) / 1000); + ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); } +// This test sends 1 regular video packet, then 4 padding packets, and then +// 1 more regular packet. +TEST_F(RtpSenderTest, SendPadding) { + // Make all (non-padding) packets go to send queue. + EXPECT_CALL(mock_paced_sender_, + SendPacket(PacedSender::kNormalPriority, _, _, _, _, _)). + WillRepeatedly(testing::Return(false)); + + uint16_t seq_num = kSeqNum; + uint32_t timestamp = kTimestamp; + rtp_sender_->SetStorePacketsStatus(true, 10); + int rtp_header_len = 12; + EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension( + kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId)); + rtp_header_len += 4; // 4 bytes extension. + EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension( + kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId)); + rtp_header_len += 4; // 4 bytes extension. + rtp_header_len += 4; // 4 extra bytes common to all extension headers. + + // Create and set up parser. + scoped_ptr<webrtc::RtpHeaderParser> rtp_parser( + webrtc::RtpHeaderParser::Create()); + ASSERT_TRUE(rtp_parser.get() != NULL); + rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset, + kTransmissionTimeOffsetExtensionId); + rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime, + kAbsoluteSendTimeExtensionId); + webrtc::RTPHeader rtp_header; + + rtp_sender_->SetTargetSendBitrate(300000); + int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_, + kPayload, + kMarkerBit, + timestamp, + capture_time_ms); + + // Packet should be stored in a send bucket. + EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, + 0, + rtp_length, + capture_time_ms, + kAllowRetransmission, + PacedSender::kNormalPriority)); + + int total_packets_sent = 0; + EXPECT_EQ(total_packets_sent, transport_.packets_sent_); + + const int kStoredTimeInMs = 100; + fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); + rtp_sender_->TimeToSendPacket(seq_num++, capture_time_ms, false); + // Packet should now be sent. This test doesn't verify the regular video + // packet, since it is tested in another test. + EXPECT_EQ(++total_packets_sent, transport_.packets_sent_); + timestamp += 90 * kStoredTimeInMs; + + // Send padding 4 times, waiting 50 ms between each. + for (int i = 0; i < 4; ++i) { + const int kPaddingPeriodMs = 50; + const int kPaddingBytes = 100; + const int kMaxPaddingLength = 224; // Value taken from rtp_sender.cc. + // Padding will be forced to full packets. + EXPECT_EQ(kMaxPaddingLength, rtp_sender_->TimeToSendPadding(kPaddingBytes)); + + // Process send bucket. Padding should now be sent.
+ EXPECT_EQ(++total_packets_sent, transport_.packets_sent_); + EXPECT_EQ(kMaxPaddingLength + rtp_header_len, + transport_.last_sent_packet_len_); + // Parse sent packet. + ASSERT_TRUE(rtp_parser->Parse(transport_.last_sent_packet_, kPaddingBytes, + &rtp_header)); + + // Verify sequence number and timestamp. + EXPECT_EQ(seq_num++, rtp_header.sequenceNumber); + EXPECT_EQ(timestamp, rtp_header.timestamp); + // Verify transmission time offset. + EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset); + uint64_t expected_send_time = + ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); + EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); + fake_clock_.AdvanceTimeMilliseconds(kPaddingPeriodMs); + timestamp += 90 * kPaddingPeriodMs; + } + + // Send a regular video packet again. + capture_time_ms = fake_clock_.TimeInMilliseconds(); + rtp_length = rtp_sender_->BuildRTPheader(packet_, + kPayload, + kMarkerBit, + timestamp, + capture_time_ms); + + // Packet should be stored in a send bucket. + EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, + 0, + rtp_length, + capture_time_ms, + kAllowRetransmission, + PacedSender::kNormalPriority)); + + rtp_sender_->TimeToSendPacket(seq_num, capture_time_ms, false); + // Process send bucket. + EXPECT_EQ(++total_packets_sent, transport_.packets_sent_); + EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_); + // Parse sent packet. + ASSERT_TRUE(rtp_parser->Parse(transport_.last_sent_packet_, rtp_length, + &rtp_header)); + + // Verify sequence number and timestamp. + EXPECT_EQ(seq_num, rtp_header.sequenceNumber); + EXPECT_EQ(timestamp, rtp_header.timestamp); + // Verify transmission time offset. This packet is sent without delay. + EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset); + uint64_t expected_send_time = + ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); + EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); +} + +TEST_F(RtpSenderTest, SendRedundantPayloads) { + MockTransport transport; + rtp_sender_.reset(new RTPSender(0, false, &fake_clock_, &transport, NULL, + &mock_paced_sender_)); + rtp_sender_->SetSequenceNumber(kSeqNum); + // Make all packets go through the pacer. + EXPECT_CALL(mock_paced_sender_, + SendPacket(PacedSender::kNormalPriority, _, _, _, _, _)). + WillRepeatedly(testing::Return(false)); + + uint16_t seq_num = kSeqNum; + rtp_sender_->SetStorePacketsStatus(true, 10); + int rtp_header_len = 12; + EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension( + kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId)); + rtp_header_len += 4; // 4 bytes extension. + rtp_header_len += 4; // 4 extra bytes common to all extension headers. + + rtp_sender_->SetRTXStatus(kRtxRetransmitted | kRtxRedundantPayloads, true, + 1234); + + // Create and set up parser. + scoped_ptr<webrtc::RtpHeaderParser> rtp_parser( + webrtc::RtpHeaderParser::Create()); + ASSERT_TRUE(rtp_parser.get() != NULL); + rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset, + kTransmissionTimeOffsetExtensionId); + rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime, + kAbsoluteSendTimeExtensionId); + rtp_sender_->SetTargetSendBitrate(300000); + const size_t kNumPayloadSizes = 10; + const int kPayloadSizes[kNumPayloadSizes] = {500, 550, 600, 650, 700, 750, + 800, 850, 900, 950}; + // Send 10 packets of increasing size.
+ for (size_t i = 0; i < kNumPayloadSizes; ++i) { + int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + EXPECT_CALL(transport, SendPacket(_, _, _)) + .WillOnce(testing::ReturnArg<2>()); + SendPacket(capture_time_ms, kPayloadSizes[i]); + rtp_sender_->TimeToSendPacket(seq_num++, capture_time_ms, false); + fake_clock_.AdvanceTimeMilliseconds(33); + } + const int kPaddingPayloadSize = 224; + // The amount of padding to send is too small to send a payload packet. + EXPECT_CALL(transport, SendPacket(_, _, kPaddingPayloadSize + rtp_header_len)) + .WillOnce(testing::ReturnArg<2>()); + EXPECT_EQ(kPaddingPayloadSize, rtp_sender_->TimeToSendPadding(49)); + + const int kRtxHeaderSize = 2; + EXPECT_CALL(transport, SendPacket(_, _, kPayloadSizes[0] + + rtp_header_len + kRtxHeaderSize)) + .WillOnce(testing::ReturnArg<2>()); + EXPECT_EQ(kPayloadSizes[0], rtp_sender_->TimeToSendPadding(500)); + + EXPECT_CALL(transport, SendPacket(_, _, kPayloadSizes[kNumPayloadSizes - 1] + + rtp_header_len + kRtxHeaderSize)) + .WillOnce(testing::ReturnArg<2>()); + EXPECT_CALL(transport, SendPacket(_, _, kPaddingPayloadSize + rtp_header_len)) + .WillOnce(testing::ReturnArg<2>()); + EXPECT_EQ(kPayloadSizes[kNumPayloadSizes - 1] + kPaddingPayloadSize, + rtp_sender_->TimeToSendPadding(999)); +} + TEST_F(RtpSenderTest, SendGenericVideo) { char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC"; const uint8_t payload_type = 127; @@ -512,6 +718,135 @@ TEST_F(RtpSenderTest, SendGenericVideo) { EXPECT_EQ(0, memcmp(payload, payload_data, sizeof(payload))); } +TEST_F(RtpSenderTest, FrameCountCallbacks) { + class TestCallback : public FrameCountObserver { + public: + TestCallback() + : FrameCountObserver(), num_calls_(0), ssrc_(0), + key_frames_(0), delta_frames_(0) {} + virtual ~TestCallback() {} + + virtual void FrameCountUpdated(FrameType frame_type, + uint32_t frame_count, + const unsigned int ssrc) { + ++num_calls_; + ssrc_ = ssrc; + switch (frame_type) { + case kVideoFrameDelta: + delta_frames_ = frame_count; + break; + case kVideoFrameKey: + key_frames_ = frame_count; + break; + default: + break; + } + } + + uint32_t num_calls_; + uint32_t ssrc_; + uint32_t key_frames_; + uint32_t delta_frames_; + } callback; + + char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC"; + const uint8_t payload_type = 127; + ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000, + 0, 1500)); + uint8_t payload[] = {47, 11, 32, 93, 89}; + rtp_sender_->SetStorePacketsStatus(true, 1); + uint32_t ssrc = rtp_sender_->SSRC(); + + rtp_sender_->RegisterFrameCountObserver(&callback); + + ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameKey, payload_type, 1234, + 4321, payload, sizeof(payload), + NULL)); + + EXPECT_EQ(1U, callback.num_calls_); + EXPECT_EQ(ssrc, callback.ssrc_); + EXPECT_EQ(1U, callback.key_frames_); + EXPECT_EQ(0U, callback.delta_frames_); + + ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta, + payload_type, 1234, 4321, payload, + sizeof(payload), NULL)); + + EXPECT_EQ(2U, callback.num_calls_); + EXPECT_EQ(ssrc, callback.ssrc_); + EXPECT_EQ(1U, callback.key_frames_); + EXPECT_EQ(1U, callback.delta_frames_); + + rtp_sender_->RegisterFrameCountObserver(NULL); +} + +TEST_F(RtpSenderTest, BitrateCallbacks) { + class TestCallback : public BitrateStatisticsObserver { + public: + TestCallback() + : BitrateStatisticsObserver(), num_calls_(0), ssrc_(0), bitrate_() {} + virtual ~TestCallback() {} + + virtual void Notify(const BitrateStatistics& stats, uint32_t ssrc) { + ++num_calls_; + ssrc_ = ssrc; +
bitrate_ = stats; + } + + uint32_t num_calls_; + uint32_t ssrc_; + BitrateStatistics bitrate_; + } callback; + + // Simulate kNumPackets sent with kPacketInterval ms intervals. + const uint32_t kNumPackets = 15; + const uint32_t kPacketInterval = 20; + // Overhead = 12 bytes RTP header + 1 byte generic header. + const uint32_t kPacketOverhead = 13; + + char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC"; + const uint8_t payload_type = 127; + ASSERT_EQ( + 0, + rtp_sender_->RegisterPayload(payload_name, payload_type, 90000, 0, 1500)); + uint8_t payload[] = {47, 11, 32, 93, 89}; + rtp_sender_->SetStorePacketsStatus(true, 1); + uint32_t ssrc = rtp_sender_->SSRC(); + + rtp_sender_->RegisterBitrateObserver(&callback); + + // Initial process call so we get a new time window. + rtp_sender_->ProcessBitrate(); + uint64_t start_time = fake_clock_.CurrentNtpInMilliseconds(); + + // Send a few frames. + for (uint32_t i = 0; i < kNumPackets; ++i) { + ASSERT_EQ(0, + rtp_sender_->SendOutgoingData(kVideoFrameKey, + payload_type, + 1234, + 4321, + payload, + sizeof(payload), + 0)); + fake_clock_.AdvanceTimeMilliseconds(kPacketInterval); + } + + rtp_sender_->ProcessBitrate(); + + const uint32_t expected_packet_rate = 1000 / kPacketInterval; + + EXPECT_EQ(1U, callback.num_calls_); + EXPECT_EQ(ssrc, callback.ssrc_); + EXPECT_EQ(start_time + (kNumPackets * kPacketInterval), + callback.bitrate_.timestamp_ms); + EXPECT_EQ(expected_packet_rate, callback.bitrate_.packet_rate); + EXPECT_EQ((kPacketOverhead + sizeof(payload)) * 8 * expected_packet_rate, + callback.bitrate_.bitrate_bps); + + rtp_sender_->RegisterBitrateObserver(NULL); +} + class RtpSenderAudioTest : public RtpSenderTest { protected: RtpSenderAudioTest() {} @@ -524,6 +859,86 @@ class RtpSenderAudioTest : public RtpSenderTest { } }; +TEST_F(RtpSenderTest, StreamDataCountersCallbacks) { + class TestCallback : public StreamDataCountersCallback { + public: + TestCallback() + : StreamDataCountersCallback(), ssrc_(0), counters_() {} + virtual ~TestCallback() {} + + virtual void DataCountersUpdated(const StreamDataCounters& counters, + uint32_t ssrc) { + ssrc_ = ssrc; + counters_ = counters; + } + + uint32_t ssrc_; + StreamDataCounters counters_; + bool Matches(uint32_t ssrc, uint32_t bytes, uint32_t header_bytes, + uint32_t padding, uint32_t packets, uint32_t retransmits, + uint32_t fec) { + return ssrc_ == ssrc && + counters_.bytes == bytes && + counters_.header_bytes == header_bytes && + counters_.padding_bytes == padding && + counters_.packets == packets && + counters_.retransmitted_packets == retransmits && + counters_.fec_packets == fec; + } + + } callback; + + const uint8_t kRedPayloadType = 96; + const uint8_t kUlpfecPayloadType = 97; + const uint32_t kMaxPaddingSize = 224; + char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC"; + const uint8_t payload_type = 127; + ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000, + 0, 1500)); + uint8_t payload[] = {47, 11, 32, 93, 89}; + rtp_sender_->SetStorePacketsStatus(true, 1); + uint32_t ssrc = rtp_sender_->SSRC(); + + rtp_sender_->RegisterRtpStatisticsCallback(&callback); + + // Send a frame. + ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameKey, payload_type, 1234, + 4321, payload, sizeof(payload), + NULL)); + + // {bytes = 6, header = 12, padding = 0, packets = 1, retrans = 0, fec = 0} + EXPECT_TRUE(callback.Matches(ssrc, 6, 12, 0, 1, 0, 0)); + + // Retransmit a frame. 
+ uint16_t seqno = rtp_sender_->SequenceNumber() - 1; + rtp_sender_->ReSendPacket(seqno, 0); + + // {bytes = 6, header = 12, padding = 0, packets = 2, retrans = 1, fec = 0} + EXPECT_TRUE(callback.Matches(ssrc, 6, 12, 0, 2, 1, 0)); + + // Send padding. + rtp_sender_->TimeToSendPadding(kMaxPaddingSize); + // {bytes = 6, header = 24, padding = 224, packets = 3, retrans = 1, fec = 0} + EXPECT_TRUE(callback.Matches(ssrc, 6, 24, 224, 3, 1, 0)); + + // Send FEC. + rtp_sender_->SetGenericFECStatus(true, kRedPayloadType, kUlpfecPayloadType); + FecProtectionParams fec_params; + fec_params.fec_mask_type = kFecMaskRandom; + fec_params.fec_rate = 1; + fec_params.max_fec_frames = 1; + fec_params.use_uep_protection = false; + rtp_sender_->SetFecParameters(&fec_params, &fec_params); + ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta, payload_type, + 1234, 4321, payload, + sizeof(payload), NULL)); + + // {bytes = 34, header = 48, padding = 224, packets = 5, retrans = 1, fec = 1} + EXPECT_TRUE(callback.Matches(ssrc, 34, 48, 224, 5, 1, 1)); + + rtp_sender_->RegisterRtpStatisticsCallback(NULL); +} + TEST_F(RtpSenderAudioTest, BuildRTPPacketWithAudioLevelExtension) { EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(true, kAudioLevelExtensionId)); diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index 57f64995114d..b96020f71e5c 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -14,6 +14,7 @@ #include #include +#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" #include "webrtc/modules/rtp_rtcp/source/producer_fec.h" #include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h" #include "webrtc/modules/rtp_rtcp/source/rtp_format_h264.h" @@ -33,27 +34,26 @@ struct RtpPacket { RTPSenderVideo::RTPSenderVideo(const int32_t id, Clock* clock, - RTPSenderInterface* rtpSender) : - _id(id), - _rtpSender(*rtpSender), - _sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()), + RTPSenderInterface* rtpSender) + : _id(id), + _rtpSender(*rtpSender), + _sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()), + _videoType(kRtpVideoGeneric), + _videoCodecInformation(NULL), + _maxBitrate(0), + _retransmissionSettings(kRetransmitBaseLayer), - _videoType(kRtpVideoGeneric), - _videoCodecInformation(NULL), - _maxBitrate(0), - _retransmissionSettings(kRetransmitBaseLayer), - - // Generic FEC - _fec(id), - _fecEnabled(false), - _payloadTypeRED(-1), - _payloadTypeFEC(-1), - _numberFirstPartition(0), - delta_fec_params_(), - key_fec_params_(), - producer_fec_(&_fec), - _fecOverheadRate(clock), - _videoBitrate(clock) { + // Generic FEC + _fec(id), + _fecEnabled(false), + _payloadTypeRED(-1), + _payloadTypeFEC(-1), + _numberFirstPartition(0), + delta_fec_params_(), + key_fec_params_(), + producer_fec_(&_fec), + _fecOverheadRate(clock, NULL), + _videoBitrate(clock, NULL) { memset(&delta_fec_params_, 0, sizeof(delta_fec_params_)); memset(&key_fec_params_, 0, sizeof(key_fec_params_)); delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1; @@ -257,8 +257,13 @@ RTPSenderVideo::FECPacketOverhead() const { if (_fecEnabled) { - return ForwardErrorCorrection::PacketOverhead() + - REDForFECHeaderLength; + // Overhead is FEC headers plus RED for FEC header plus anything in RTP + // header beyond the 12 bytes base header (CSRC list, extensions...)
+ // The reason for the header extensions to be included here is that
+ // from an FEC viewpoint, they are part of the payload to be protected.
+ // (The base RTP header is already protected by the FEC header.)
+ return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
+ (_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
}
return 0;
}
diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index 18297ef5efc7..c950c93a25b0 100644
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -125,16 +125,12 @@ bool StringCompare(const char* str1, const char* str2,
}
#endif
-#if !defined(WEBRTC_LITTLE_ENDIAN) && !defined(WEBRTC_BIG_ENDIAN)
-#error Either WEBRTC_LITTLE_ENDIAN or WEBRTC_BIG_ENDIAN must be defined
-#endif
-
/* for RTP/RTCP
All integer fields are carried in network byte order, that is, most
significant byte (octet) first. AKA big-endian.
*/
void AssignUWord32ToBuffer(uint8_t* dataBuffer, uint32_t value) {
-#if defined(WEBRTC_LITTLE_ENDIAN)
+#if defined(WEBRTC_ARCH_LITTLE_ENDIAN)
dataBuffer[0] = static_cast<uint8_t>(value >> 24);
dataBuffer[1] = static_cast<uint8_t>(value >> 16);
dataBuffer[2] = static_cast<uint8_t>(value >> 8);
@@ -146,7 +142,7 @@ void AssignUWord32ToBuffer(uint8_t* dataBuffer, uint32_t value) {
}
void AssignUWord24ToBuffer(uint8_t* dataBuffer, uint32_t value) {
-#if defined(WEBRTC_LITTLE_ENDIAN)
+#if defined(WEBRTC_ARCH_LITTLE_ENDIAN)
dataBuffer[0] = static_cast<uint8_t>(value >> 16);
dataBuffer[1] = static_cast<uint8_t>(value >> 8);
dataBuffer[2] = static_cast<uint8_t>(value);
@@ -158,7 +154,7 @@ void AssignUWord24ToBuffer(uint8_t* dataBuffer, uint32_t value) {
}
void AssignUWord16ToBuffer(uint8_t* dataBuffer, uint16_t value) {
-#if defined(WEBRTC_LITTLE_ENDIAN)
+#if defined(WEBRTC_ARCH_LITTLE_ENDIAN)
dataBuffer[0] = static_cast<uint8_t>(value >> 8);
dataBuffer[1] = static_cast<uint8_t>(value);
#else
@@ -168,7 +164,7 @@ void AssignUWord16ToBuffer(uint8_t* dataBuffer, uint16_t value) {
}
uint16_t BufferToUWord16(const uint8_t* dataBuffer) {
-#if defined(WEBRTC_LITTLE_ENDIAN)
+#if defined(WEBRTC_ARCH_LITTLE_ENDIAN)
return (dataBuffer[0] << 8) + dataBuffer[1];
#else
return *reinterpret_cast<const uint16_t*>(dataBuffer);
#endif
}
uint32_t BufferToUWord24(const uint8_t* dataBuffer) {
@@ -180,7 +176,7 @@ uint32_t BufferToUWord24(const uint8_t* dataBuffer) {
}
uint32_t BufferToUWord32(const uint8_t* dataBuffer) {
-#if defined(WEBRTC_LITTLE_ENDIAN)
+#if defined(WEBRTC_ARCH_LITTLE_ENDIAN)
return (dataBuffer[0] << 24) + (dataBuffer[1] << 16) + (dataBuffer[2] << 8) +
dataBuffer[3];
#else
@@ -395,9 +391,11 @@ bool RTPHeaderParser::Parse(RTPHeader& header,
// If in effect, MAY be omitted for those packets for which the offset
// is zero.
+ header.extension.hasTransmissionTimeOffset = false;
header.extension.transmissionTimeOffset = 0;
// May not be present in packet.
+ header.extension.hasAbsoluteSendTime = false;
header.extension.absoluteSendTime = 0;
if (X) {
@@ -494,6 +492,7 @@ void RTPHeaderParser::ParseOneByteExtensionHeader(
// Negative offset, correct sign for Word24 to Word32.
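// (For example: a stored 24-bit offset of 0xFFFFFF is -1. Read into a
// 32-bit field it arrives as 0x00FFFFFF, and OR-ing in 0xFF000000 below
// restores the two's-complement value 0xFFFFFFFF == -1.)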
header.extension.transmissionTimeOffset |= 0xFF000000; } + header.extension.hasTransmissionTimeOffset = true; break; } case kRtpExtensionAudioLevel: { @@ -528,6 +527,7 @@ void RTPHeaderParser::ParseOneByteExtensionHeader( absoluteSendTime += *ptr++ << 8; absoluteSendTime += *ptr++; header.extension.absoluteSendTime = absoluteSendTime; + header.extension.hasAbsoluteSendTime = true; break; } default: { diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi index 25c6ff62ec4e..25d72642c19d 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi @@ -17,10 +17,6 @@ 'udp_transport', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - '../interface', - '../../interface', - ], 'sources': [ 'BWEStandAlone/BWEStandAlone.cc', 'BWEStandAlone/TestLoadGenerator.cc', @@ -36,11 +32,6 @@ }, ], ], - - 'include_dirs': [ - ], - 'link_settings': { - }, }, { @@ -53,14 +44,6 @@ 'include_dirs': [ '/opt/matlab2010a/extern/include', ], - # 'direct_dependent_settings': { - # 'defines': [ - # 'MATLAB', - # ], - # 'include_dirs': [ - # 'BWEStandAlone', - # ], - # }, 'export_dependent_settings': [ 'matlab_plotting_include', ], @@ -93,9 +76,6 @@ 'target_name': 'matlab_plotting_include', 'type': 'none', 'direct_dependent_settings': { - 'defines': [ -# 'MATLAB', - ], 'include_dirs': [ 'BWEStandAlone', ], diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc index 78df647dabb9..4f43e6ac4a4a 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc @@ -116,7 +116,7 @@ TEST_F(RtpRtcpAPITest, RTCP) { TEST_F(RtpRtcpAPITest, RtxSender) { unsigned int ssrc = 0; - RtxMode rtx_mode = kRtxOff; + int rtx_mode = kRtxOff; const int kRtxPayloadType = 119; int payload_type = -1; EXPECT_EQ(0, module->SetRTXSendStatus(kRtxRetransmitted, true, 1)); diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc index 3df06a210961..1e715187eefa 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc @@ -336,7 +336,7 @@ TEST_F(RtpRtcpRtcpTest, RTCP) { StreamStatistician *statistician = receive_statistics2_->GetStatistician(reportBlockReceived.sourceSSRC); - StreamStatistician::Statistics stats; + RtcpStatistics stats; EXPECT_TRUE(statistician->GetStatistics(&stats, true)); EXPECT_EQ(0, stats.fraction_lost); EXPECT_EQ((uint32_t)0, stats.cumulative_lost); diff --git a/media/webrtc/trunk/webrtc/modules/utility/interface/helpers_android.h b/media/webrtc/trunk/webrtc/modules/utility/interface/helpers_android.h new file mode 100644 index 000000000000..d0796ec0a732 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/utility/interface/helpers_android.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_ +#define WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_ + +#include + +namespace webrtc { + +// Attach thread to JVM if necessary and detach at scope end if originally +// attached. +class AttachThreadScoped { + public: + explicit AttachThreadScoped(JavaVM* jvm); + ~AttachThreadScoped(); + JNIEnv* env(); + + private: + bool attached_; + JavaVM* jvm_; + JNIEnv* env_; +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_ diff --git a/media/webrtc/trunk/webrtc/modules/utility/interface/process_thread.h b/media/webrtc/trunk/webrtc/modules/utility/interface/process_thread.h index cdbb4d39127b..4db92a308a84 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/interface/process_thread.h +++ b/media/webrtc/trunk/webrtc/modules/utility/interface/process_thread.h @@ -25,7 +25,7 @@ public: virtual int32_t Start() = 0; virtual int32_t Stop() = 0; - virtual int32_t RegisterModule(const Module* module) = 0; + virtual int32_t RegisterModule(Module* module) = 0; virtual int32_t DeRegisterModule(const Module* module) = 0; protected: virtual ~ProcessThread(); diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.cc b/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.cc index 16faa58d3fce..032869c72b84 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.cc @@ -342,31 +342,6 @@ int32_t FileRecorderImpl::WriteEncodedAudioData( #ifdef WEBRTC_MODULE_UTILITY_VIDEO -class AudioFrameFileInfo -{ - public: - AudioFrameFileInfo(const int8_t* audioData, - const uint16_t audioSize, - const uint16_t audioMS, - const TickTime& playoutTS) - : _audioData(), _audioSize(audioSize), _audioMS(audioMS), - _playoutTS(playoutTS) - { - if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES) - { - assert(false); - _audioSize = 0; - return; - } - memcpy(_audioData, audioData, audioSize); - }; - // TODO (hellner): either turn into a struct or provide get/set functions. - int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES]; - uint16_t _audioSize; - uint16_t _audioMS; - TickTime _playoutTS; -}; - AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat) : FileRecorderImpl(instanceID, fileFormat), _videoOnly(false), @@ -545,49 +520,39 @@ int32_t AviRecorder::ProcessAudio() { // Syncronize audio to the current frame to process by throwing away // audio samples with older timestamp than the video frame. - uint32_t numberOfAudioElements = - _audioFramesToWrite.GetSize(); - for (uint32_t i = 0; i < numberOfAudioElements; ++i) + size_t numberOfAudioElements = + _audioFramesToWrite.size(); + for (size_t i = 0; i < numberOfAudioElements; ++i) { - AudioFrameFileInfo* frameInfo = - (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem(); - if(frameInfo) + AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front(); + if(TickTime::TicksToMilliseconds( + frameInfo->_playoutTS.Ticks()) < + frameToProcess->render_time_ms()) { - if(TickTime::TicksToMilliseconds( - frameInfo->_playoutTS.Ticks()) < - frameToProcess->render_time_ms()) - { - delete frameInfo; - _audioFramesToWrite.PopFront(); - } else - { - break; - } + delete frameInfo; + _audioFramesToWrite.pop_front(); + } else + { + break; } } } } // Write all audio up to current timestamp. 
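// (A frame counts as due once its playout timestamp lies in the past,
// i.e. (TickTime::Now() - _playoutTS).Milliseconds() > 0. For instance,
// with queued playout times of now-30ms, now-10ms and now+5ms, the loop
// below writes the first two frames and breaks on the third, which stays
// queued for a later pass.)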
int32_t error = 0; - uint32_t numberOfAudioElements = _audioFramesToWrite.GetSize(); - for (uint32_t i = 0; i < numberOfAudioElements; ++i) + size_t numberOfAudioElements = _audioFramesToWrite.size(); + for (size_t i = 0; i < numberOfAudioElements; ++i) { - AudioFrameFileInfo* frameInfo = - (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem(); - if(frameInfo) + AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front(); + if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0) { - if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0) - { - _moduleFile->IncomingAudioData(frameInfo->_audioData, - frameInfo->_audioSize); - _writtenAudioMS += frameInfo->_audioMS; - delete frameInfo; - _audioFramesToWrite.PopFront(); - } else { - break; - } + _moduleFile->IncomingAudioData(frameInfo->_audioData, + frameInfo->_audioSize); + _writtenAudioMS += frameInfo->_audioMS; + delete frameInfo; + _audioFramesToWrite.pop_front(); } else { - _audioFramesToWrite.PopFront(); + break; } } return error; @@ -762,7 +727,7 @@ int32_t AviRecorder::WriteEncodedAudioData( { return -1; } - if (_audioFramesToWrite.GetSize() > kMaxAudioBufferQueueLength) + if (_audioFramesToWrite.size() > kMaxAudioBufferQueueLength) { StopRecording(); return -1; @@ -771,15 +736,15 @@ int32_t AviRecorder::WriteEncodedAudioData( if(playoutTS) { - _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer, - bufferLength, - millisecondsOfData, - *playoutTS)); + _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer, + bufferLength, + millisecondsOfData, + *playoutTS)); } else { - _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer, - bufferLength, - millisecondsOfData, - TickTime::Now())); + _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer, + bufferLength, + millisecondsOfData, + TickTime::Now())); } _timeEvent.Set(); return 0; diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.h b/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.h index 0b7290eddce2..53fd26bc25d4 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.h +++ b/media/webrtc/trunk/webrtc/modules/utility/source/file_recorder_impl.h @@ -15,6 +15,8 @@ #ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_ #define WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_ +#include + #include "webrtc/common_audio/resampler/include/resampler.h" #include "webrtc/common_types.h" #include "webrtc/engine_configurations.h" @@ -40,6 +42,8 @@ enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32}; enum { MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2}; enum { kMaxAudioBufferQueueLength = 100 }; +class CriticalSectionWrapper; + class FileRecorderImpl : public FileRecorder { public: @@ -103,6 +107,31 @@ private: #ifdef WEBRTC_MODULE_UTILITY_VIDEO +class AudioFrameFileInfo +{ + public: + AudioFrameFileInfo(const int8_t* audioData, + const uint16_t audioSize, + const uint16_t audioMS, + const TickTime& playoutTS) + : _audioData(), _audioSize(audioSize), _audioMS(audioMS), + _playoutTS(playoutTS) + { + if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES) + { + assert(false); + _audioSize = 0; + return; + } + memcpy(_audioData, audioData, audioSize); + }; + // TODO (hellner): either turn into a struct or provide get/set functions. 
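+ // (The guard in the constructor above rejects anything larger than
+ // MAX_AUDIO_BUFFER_IN_BYTES = 60*32*2 = 3840 bytes, i.e. 60 ms of 16-bit
+ // mono audio at 32 kHz, per the enums near the top of this header.)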
+ int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+ uint16_t _audioSize;
+ uint16_t _audioMS;
+ TickTime _playoutTS;
+};
+
class AviRecorder : public FileRecorderImpl
{
public:
@@ -126,6 +155,7 @@ protected:
uint16_t millisecondsOfData,
const TickTime* playoutTS);
private:
+ typedef std::list<AudioFrameFileInfo*> AudioInfoList;
static bool Run(ThreadObj threadObj);
bool Process();
@@ -141,7 +171,7 @@ private:
VideoCodec _videoCodecInst;
bool _videoOnly;
- ListWrapper _audioFramesToWrite;
+ AudioInfoList _audioFramesToWrite;
bool _firstAudioFrameReceived;
VideoFramesQueue* _videoFramesQueue;
diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/helpers_android.cc b/media/webrtc/trunk/webrtc/modules/utility/source/helpers_android.cc
new file mode 100644
index 000000000000..6acc77ea1ed1
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/modules/utility/source/helpers_android.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/utility/interface/helpers_android.h"
+
+#include <assert.h>
+#include <stddef.h>
+
+namespace webrtc {
+
+AttachThreadScoped::AttachThreadScoped(JavaVM* jvm)
+ : attached_(false), jvm_(jvm), env_(NULL) {
+ jint ret_val = jvm->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4);
+ if (ret_val == JNI_EDETACHED) {
+ // Attach the thread to the Java VM.
+ ret_val = jvm_->AttachCurrentThread(&env_, NULL);
+ attached_ = ret_val == JNI_OK;
+ assert(attached_);
+ }
+}
+
+AttachThreadScoped::~AttachThreadScoped() {
+ if (attached_ && (jvm_->DetachCurrentThread() < 0)) {
+ assert(false);
+ }
+}
+
+JNIEnv* AttachThreadScoped::env() { return env_; }
+
+} // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.cc b/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.cc
index 08979d24935e..dd5c42cf4bea 100644
--- a/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.cc
@@ -87,25 +87,23 @@ int32_t ProcessThreadImpl::Stop()
return 0;
}
-int32_t ProcessThreadImpl::RegisterModule(const Module* module)
+int32_t ProcessThreadImpl::RegisterModule(Module* module)
{
CriticalSectionScoped lock(_critSectModules);
// Only allow module to be registered once.
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- if(module == item->GetItem())
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ if(module == *iter)
{
return -1;
}
- item = _modules.Next(item);
}
- _modules.PushFront(module);
+ _modules.push_front(module);
WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
"number of registered modules has increased to %d",
- _modules.GetSize());
+ _modules.size());
// Wake the thread calling ProcessThreadImpl::Process() to update the
// waiting time. The waiting time for the just registered module may be
// shorter than all other registered modules.
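// (A condensed sketch of that wake-up, using the members declared in
// process_thread_impl.h below; the _timeEvent.Set() call itself sits just
// past the end of this hunk:
//   RegisterModule(m): lock, _modules.push_front(m), unlock, _timeEvent.Set();
//   Process(): wait_ms = min over modules of TimeUntilNextProcess(), capped
//              at 100 ms; _timeEvent.Wait(wait_ms) returns early once Set()
//              is called, after which every due module's Process() runs.
// A newly registered short-period module thus takes effect immediately
// instead of waiting out the previous timeout.)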
@@ -116,19 +114,16 @@ int32_t ProcessThreadImpl::RegisterModule(const Module* module)
int32_t ProcessThreadImpl::DeRegisterModule(const Module* module)
{
CriticalSectionScoped lock(_critSectModules);
-
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- if(module == item->GetItem())
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ if(module == *iter)
{
- int res = _modules.Erase(item);
+ _modules.erase(iter);
WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
"number of registered modules has decreased to %d",
- _modules.GetSize());
- return res;
+ _modules.size());
+ return 0;
}
- item = _modules.Next(item);
}
return -1;
}
@@ -145,16 +140,13 @@ bool ProcessThreadImpl::Process()
int32_t minTimeToNext = 100;
{
CriticalSectionScoped lock(_critSectModules);
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- int32_t timeToNext =
- static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ int32_t timeToNext = (*iter)->TimeUntilNextProcess();
if(minTimeToNext > timeToNext)
{
minTimeToNext = timeToNext;
}
- item = _modules.Next(item);
}
}
@@ -172,16 +164,13 @@ bool ProcessThreadImpl::Process()
}
{
CriticalSectionScoped lock(_critSectModules);
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- int32_t timeToNext =
- static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ int32_t timeToNext = (*iter)->TimeUntilNextProcess();
if(timeToNext < 1)
{
- static_cast<Module*>(item->GetItem())->Process();
+ (*iter)->Process();
}
- item = _modules.Next(item);
}
}
return true;
diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.h b/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.h
index d1913c47c494..14fbc18a2a5d 100644
--- a/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.h
@@ -11,10 +11,11 @@
#ifndef WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
#define WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+#include <list>
+
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/typedefs.h"
@@ -28,7 +29,7 @@ public:
virtual int32_t Start();
virtual int32_t Stop();
- virtual int32_t RegisterModule(const Module* module);
+ virtual int32_t RegisterModule(Module* module);
virtual int32_t DeRegisterModule(const Module* module);
protected:
@@ -37,9 +38,10 @@ protected:
bool Process();
private:
+ typedef std::list<Module*> ModuleList;
EventWrapper& _timeEvent;
CriticalSectionWrapper* _critSectModules;
- ListWrapper _modules;
+ ModuleList _modules;
ThreadWrapper* _thread;
};
} // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc b/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc
index 0225836f8ae7..0ffee6af6996 100644
--- a/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc
@@ -245,37 +245,25 @@ inline uint32_t
RtpDumpImpl::GetTimeInMS() const gettimeofday(&tv, &tz); val = tv.tv_sec * 1000 + tv.tv_usec / 1000; return val; -#else - #error Either _WIN32 or LINUX or WEBRTC_MAC has to be defined! - assert(false); - return 0; #endif } inline uint32_t RtpDumpImpl::RtpDumpHtonl(uint32_t x) const { -#if defined(WEBRTC_BIG_ENDIAN) +#if defined(WEBRTC_ARCH_BIG_ENDIAN) return x; -#elif defined(WEBRTC_LITTLE_ENDIAN) +#elif defined(WEBRTC_ARCH_LITTLE_ENDIAN) return (x >> 24) + ((((x >> 16) & 0xFF) << 8) + ((((x >> 8) & 0xFF) << 16) + ((x & 0xFF) << 24))); -#else -#error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined! - assert(false); - return 0; #endif } inline uint16_t RtpDumpImpl::RtpDumpHtons(uint16_t x) const { -#if defined(WEBRTC_BIG_ENDIAN) +#if defined(WEBRTC_ARCH_BIG_ENDIAN) return x; -#elif defined(WEBRTC_LITTLE_ENDIAN) +#elif defined(WEBRTC_ARCH_LITTLE_ENDIAN) return (x >> 8) + ((x & 0xFF) << 8); -#else - #error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined! - assert(false); - return 0; #endif } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/utility.gypi b/media/webrtc/trunk/webrtc/modules/utility/source/utility.gypi index 5f1d50c397dc..2f09657da8ba 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/utility.gypi +++ b/media/webrtc/trunk/webrtc/modules/utility/source/utility.gypi @@ -17,22 +17,11 @@ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - '../interface', - '../../interface', - '../../media_file/interface', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - '../../interface', - '../../audio_coding/main/interface', - ], - }, 'sources': [ '../interface/audio_frame_operations.h', '../interface/file_player.h', '../interface/file_recorder.h', + '../interface/helpers_android.h', '../interface/process_thread.h', '../interface/rtp_dump.h', 'audio_frame_operations.cc', @@ -42,6 +31,7 @@ 'file_player_impl.h', 'file_recorder_impl.cc', 'file_recorder_impl.h', + 'helpers_android.cc', 'process_thread_impl.cc', 'process_thread_impl.h', 'rtp_dump_impl.cc', @@ -49,16 +39,9 @@ ], 'conditions': [ ['enable_video==1', { - # Adds support for video recording. 
- 'defines': [ - 'WEBRTC_MODULE_UTILITY_VIDEO', - ], 'dependencies': [ 'webrtc_video_coding', ], - 'include_dirs': [ - '../../video_coding/main/interface', - ], 'sources': [ 'frame_scaler.cc', 'video_coder.cc', diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.cc b/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.cc index d3d37bec2d04..53b446501c66 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.cc +++ b/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.cc @@ -21,36 +21,24 @@ namespace webrtc { VideoFramesQueue::VideoFramesQueue() - : _incomingFrames(), - _renderDelayMs(10) + : _renderDelayMs(10) { } VideoFramesQueue::~VideoFramesQueue() { - while (!_incomingFrames.Empty()) { - ListItem* item = _incomingFrames.First(); - if (item) { - I420VideoFrame* ptrFrame = static_cast(item->GetItem()); - assert(ptrFrame != NULL); - delete ptrFrame; - } - _incomingFrames.Erase(item); + for (FrameList::iterator iter = _incomingFrames.begin(); + iter != _incomingFrames.end(); ++iter) { + delete *iter; } - while (!_emptyFrames.Empty()) { - ListItem* item = _emptyFrames.First(); - if (item) { - I420VideoFrame* ptrFrame = - static_cast(item->GetItem()); - assert(ptrFrame != NULL); - delete ptrFrame; - } - _emptyFrames.Erase(item); + for (FrameList::iterator iter = _emptyFrames.begin(); + iter != _emptyFrames.end(); ++iter) { + delete *iter; } } int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) { if (newFrame.native_handle() != NULL) { - _incomingFrames.PushBack(new TextureVideoFrame( + _incomingFrames.push_back(new TextureVideoFrame( static_cast(newFrame.native_handle()), newFrame.width(), newFrame.height(), @@ -61,15 +49,12 @@ int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) { I420VideoFrame* ptrFrameToAdd = NULL; // Try to re-use a VideoFrame. Only allocate new memory if it is necessary. - if (!_emptyFrames.Empty()) { - ListItem* item = _emptyFrames.First(); - if (item) { - ptrFrameToAdd = static_cast(item->GetItem()); - _emptyFrames.Erase(item); - } + if (!_emptyFrames.empty()) { + ptrFrameToAdd = _emptyFrames.front(); + _emptyFrames.pop_front(); } if (!ptrFrameToAdd) { - if (_emptyFrames.GetSize() + _incomingFrames.GetSize() > + if (_emptyFrames.size() + _incomingFrames.size() > KMaxNumberOfFrames) { WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, "%s: too many frames, limit: %d", __FUNCTION__, @@ -79,17 +64,12 @@ int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) { WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1, "%s: allocating buffer %d", __FUNCTION__, - _emptyFrames.GetSize() + _incomingFrames.GetSize()); + _emptyFrames.size() + _incomingFrames.size()); ptrFrameToAdd = new I420VideoFrame(); - if (!ptrFrameToAdd) { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, - "%s: could not create new frame for", __FUNCTION__); - return -1; - } } ptrFrameToAdd->CopyFrame(newFrame); - _incomingFrames.PushBack(ptrFrameToAdd); + _incomingFrames.push_back(ptrFrameToAdd); return 0; } @@ -99,20 +79,16 @@ int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) { // Recycle all frames that are older than the most recent frame. 
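// (For example: with queued render times of now-40ms, now-20ms and now+30ms
// and _renderDelayMs = 10, the first two frames qualify; the first is
// recycled via ReturnFrame(), the second is returned as the frame to
// record, and the third stays queued because its render time is still in
// the future.)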
I420VideoFrame* VideoFramesQueue::FrameToRecord() { I420VideoFrame* ptrRenderFrame = NULL; - ListItem* item = _incomingFrames.First(); - while(item) { - I420VideoFrame* ptrOldestFrameInList = - static_cast(item->GetItem()); + for (FrameList::iterator iter = _incomingFrames.begin(); + iter != _incomingFrames.end(); ++iter) { + I420VideoFrame* ptrOldestFrameInList = *iter; if (ptrOldestFrameInList->render_time_ms() <= TickTime::MillisecondTimestamp() + _renderDelayMs) { - if (ptrRenderFrame) { - // List is traversed beginning to end. If ptrRenderFrame is not - // NULL it must be the first, and thus oldest, VideoFrame in the - // queue. It can be recycled. - ReturnFrame(ptrRenderFrame); - _incomingFrames.PopFront(); - } - item = _incomingFrames.Next(item); + // List is traversed beginning to end. If ptrRenderFrame is not + // NULL it must be the first, and thus oldest, VideoFrame in the + // queue. It can be recycled. + ReturnFrame(ptrRenderFrame); + iter = _incomingFrames.erase(iter); ptrRenderFrame = ptrOldestFrameInList; } else { // All VideoFrames following this one will be even newer. No match @@ -131,7 +107,7 @@ int32_t VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) { ptrOldFrame->set_height(0); ptrOldFrame->set_render_time_ms(0); ptrOldFrame->ResetSize(); - _emptyFrames.PushBack(ptrOldFrame); + _emptyFrames.push_back(ptrOldFrame); } else { delete ptrOldFrame; } diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.h b/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.h index 4316bf7c047b..afc64d9b71e3 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.h +++ b/media/webrtc/trunk/webrtc/modules/utility/source/video_frames_queue.h @@ -13,9 +13,10 @@ #ifdef WEBRTC_MODULE_UTILITY_VIDEO +#include + #include "webrtc/common_video/interface/i420_video_frame.h" #include "webrtc/engine_configurations.h" -#include "webrtc/system_wrappers/interface/list_wrapper.h" #include "webrtc/typedefs.h" namespace webrtc { @@ -42,6 +43,7 @@ class VideoFramesQueue { int32_t ReturnFrame(I420VideoFrame* ptrOldFrame); private: + typedef std::list FrameList; // Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames. // 300 frames correspond to 10 seconds worth of frames at 30 fps. enum {KMaxNumberOfFrames = 300}; @@ -49,9 +51,9 @@ class VideoFramesQueue { // List of VideoFrame pointers. The list is sorted in the order of when the // VideoFrame was inserted into the list. The first VideoFrame in the list // was inserted first. - ListWrapper _incomingFrames; + FrameList _incomingFrames; // A list of frames that are free to be re-used. - ListWrapper _emptyFrames; + FrameList _emptyFrames; // Estimated render delay. 
uint32_t _renderDelayMs;
diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
index ac5467a7b710..bd37fe080949 100644
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
@@ -10,9 +10,13 @@
#include "webrtc/modules/video_capture/android/device_info_android.h"
-#include
+#include
+#include
+#include
+#include
#include "webrtc/modules/video_capture/android/video_capture_android.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -24,61 +28,168 @@ namespace webrtc
namespace videocapturemodule
{
-static jclass g_capabilityClass = NULL;
-
-// static
-void DeviceInfoAndroid::SetAndroidCaptureClasses(jclass capabilityClass) {
- g_capabilityClass = capabilityClass;
+static std::string ResolutionsToString(
+ const std::vector<std::pair<int, int> >& pairs) {
+ std::stringstream stream;
+ for (size_t i = 0; i < pairs.size(); ++i) {
+ if (i > 0)
+ stream << ", ";
+ stream << "(" << pairs[i].first << "x" << pairs[i].second << ")";
+ }
+ return stream.str();
}
-VideoCaptureModule::DeviceInfo*
-VideoCaptureImpl::CreateDeviceInfo (const int32_t id) {
- videocapturemodule::DeviceInfoAndroid *deviceInfo =
- new videocapturemodule::DeviceInfoAndroid(id);
- if (deviceInfo && deviceInfo->Init() != 0) {
- delete deviceInfo;
- deviceInfo = NULL;
+struct AndroidCameraInfo {
+ std::string name;
+ int min_mfps, max_mfps; // FPS*1000.
+ bool front_facing;
+ int orientation;
+ std::vector<std::pair<int, int> > resolutions; // Pairs are: (width,height).
+
+ std::string ToString() {
+ std::stringstream stream;
+ stream << "Name: [" << name << "], mfps: [" << min_mfps << ":" << max_mfps
+ << "], front_facing: " << front_facing
+ << ", orientation: " << orientation << ", resolutions: ["
+ << ResolutionsToString(resolutions) << "]";
+ return stream.str();
}
- return deviceInfo;
+};
+
+// Camera info; populated during DeviceInfoAndroid::Initialize() and immutable
+// thereafter.
+static std::vector<AndroidCameraInfo>* g_camera_info = NULL;
+
+// Set |*index| to the index of |name| in g_camera_info or return false if no
+// match found.
+static bool FindCameraIndexByName(const std::string& name, size_t* index) {
+ for (size_t i = 0; i < g_camera_info->size(); ++i) {
+ if (g_camera_info->at(i).name == name) {
+ *index = i;
+ return true;
+ }
+ }
+ return false;
+}
+
+// Returns a pointer to the named member of g_camera_info, or NULL if no match
+// is found.
+static AndroidCameraInfo* FindCameraInfoByName(const std::string& name) {
+ size_t index = 0;
+ if (FindCameraIndexByName(name, &index))
+ return &g_camera_info->at(index);
+ return NULL;
+}
+
+// static
+void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
+ // TODO(henrike): this "if" would make a lot more sense as an assert, but
+ // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine() and
+ // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate() conspire to
+ // prevent this. Once that code is made to only call
+ // VideoEngine::SetAndroidObjects() once per process, this can turn into an
+ // assert.
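+ // (Usage sketch, inferred from this patch: callers must run Initialize(jni)
+ // once before any query below, since NumberOfDevices()/GetDeviceName()
+ // read g_camera_info directly and never populate it lazily. The early
+ // return keeps repeated calls harmless in the meantime:
+ //   DeviceInfoAndroid::Initialize(jni);
+ //   VideoCaptureModule::DeviceInfo* di = VideoCaptureImpl::CreateDeviceInfo(0);
+ //   uint32_t n = di->NumberOfDevices();  // == g_camera_info->size()
+ // )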
+ if (g_camera_info) + return; + + g_camera_info = new std::vector(); + jclass j_info_class = + jsjni_GetGlobalClassRef("org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"); + jclass j_cap_class = + jsjni_GetGlobalClassRef("org/webrtc/videoengine/CaptureCapabilityAndroid"); + assert(j_info_class); + assert(j_cap_class); + jmethodID j_initialize = jni->GetStaticMethodID( + j_info_class, "getDeviceInfo", + "()[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;"); + jarray j_camera_caps = static_cast( + jni->CallStaticObjectMethod(j_info_class, j_initialize)); + + const jsize capLength = jni->GetArrayLength(j_camera_caps); + + jfieldID widthField = jni->GetFieldID(j_cap_class, "width", "[I"); + jfieldID heightField = jni->GetFieldID(j_cap_class, "height", "[I"); + jfieldID maxFpsField = jni->GetFieldID(j_cap_class, "maxMilliFPS", "I"); + jfieldID minFpsField = jni->GetFieldID(j_cap_class, "minMilliFPS", "I"); + jfieldID orientationField = jni->GetFieldID(j_cap_class, "orientation", "I"); + jfieldID frontFacingField = jni->GetFieldID(j_cap_class, "frontFacing", "Z"); + jfieldID nameField = + jni->GetFieldID(j_cap_class, "name", "Ljava/lang/String;"); + if (widthField == NULL + || heightField == NULL + || maxFpsField == NULL + || minFpsField == NULL + || orientationField == NULL + || frontFacingField == NULL + || nameField == NULL) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to get field Id.", __FUNCTION__); + return; + } + + for (jsize i = 0; i < capLength; i++) { + jobject capabilityElement = jni->GetObjectArrayElement( + (jobjectArray) j_camera_caps, + i); + + AndroidCameraInfo info; + jstring camName = static_cast(jni->GetObjectField(capabilityElement, + nameField)); + const char* camChars = jni->GetStringUTFChars(camName, nullptr); + info.name = std::string(camChars); + jni->ReleaseStringUTFChars(camName, camChars); + + info.min_mfps = jni->GetIntField(capabilityElement, minFpsField); + info.max_mfps = jni->GetIntField(capabilityElement, maxFpsField); + info.orientation = jni->GetIntField(capabilityElement, orientationField); + info.front_facing = jni->GetBooleanField(capabilityElement, frontFacingField); + + jintArray widthResArray = + static_cast(jni->GetObjectField(capabilityElement, widthField)); + jintArray heightResArray = + static_cast(jni->GetObjectField(capabilityElement, heightField)); + + const jsize numRes = jni->GetArrayLength(widthResArray); + + jint *widths = jni->GetIntArrayElements(widthResArray, nullptr); + jint *heights = jni->GetIntArrayElements(heightResArray, nullptr); + + for (jsize j = 0; j < numRes; ++j) { + info.resolutions.push_back(std::make_pair(widths[j], heights[j])); + } + g_camera_info->push_back(info); + + jni->ReleaseIntArrayElements(widthResArray, widths, JNI_ABORT); + jni->ReleaseIntArrayElements(heightResArray, heights, JNI_ABORT); + } + + jni->DeleteGlobalRef(j_info_class); + jni->DeleteGlobalRef(j_cap_class); +} + +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo( + const int32_t id) { + return new videocapturemodule::DeviceInfoAndroid(id); } DeviceInfoAndroid::DeviceInfoAndroid(const int32_t id) : DeviceInfoImpl(id) { } +DeviceInfoAndroid::~DeviceInfoAndroid() { +} + +bool DeviceInfoAndroid::FindCameraIndex(const char* deviceUniqueIdUTF8, + size_t* index) { + return FindCameraIndexByName(deviceUniqueIdUTF8, index); +} + int32_t DeviceInfoAndroid::Init() { return 0; } -DeviceInfoAndroid::~DeviceInfoAndroid() { -} - uint32_t DeviceInfoAndroid::NumberOfDevices() { - AutoLocalJNIFrame jniFrame; 
- JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return 0; - - jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); - jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "%s GetMethodId", __FUNCTION__); - // get the method ID for the Android Java GetDeviceUniqueName name. - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, - "NumberOfDevices", - "()I"); - - jint numberOfDevices = 0; - if (cid != NULL) { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "%s Calling Number of devices", __FUNCTION__); - numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid); - } - - if (numberOfDevices > 0) - return numberOfDevices; - return 0; + return g_camera_info->size(); } int32_t DeviceInfoAndroid::GetDeviceName( @@ -89,222 +200,56 @@ int32_t DeviceInfoAndroid::GetDeviceName( uint32_t deviceUniqueIdUTF8Length, char* /*productUniqueIdUTF8*/, uint32_t /*productUniqueIdUTF8Length*/) { - - int32_t result = 0; - AutoLocalJNIFrame jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return -1; - - jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); - jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); - - // get the method ID for the Android Java GetDeviceUniqueName name. - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName", - "(I)Ljava/lang/String;"); - if (cid != NULL) { - jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject, - cid, deviceNumber); - if (javaDeviceNameObj == NULL || jniFrame.CheckForException()) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to get device name for device %d.", - __FUNCTION__, (int) deviceNumber); - result = -1; - } else { - jboolean isCopy; - const char* javaDeviceNameChar = env->GetStringUTFChars( - (jstring) javaDeviceNameObj - ,&isCopy); - const jsize javaDeviceNameCharLength = - env->GetStringUTFLength((jstring) javaDeviceNameObj); - if ((uint32_t) javaDeviceNameCharLength < - deviceUniqueIdUTF8Length) { - memcpy(deviceUniqueIdUTF8, - javaDeviceNameChar, - javaDeviceNameCharLength + 1); - } - else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, "%s: deviceUniqueIdUTF8 to short.", - __FUNCTION__); - result = -1; - } - if ((uint32_t) javaDeviceNameCharLength < deviceNameLength) { - memcpy(deviceNameUTF8, - javaDeviceNameChar, - javaDeviceNameCharLength + 1); - } - env->ReleaseStringUTFChars((jstring) javaDeviceNameObj, - javaDeviceNameChar); - } // javaDeviceNameObj == NULL - + if (deviceNumber >= g_camera_info->size()) + return -1; + const AndroidCameraInfo& info = g_camera_info->at(deviceNumber); + if (info.name.length() + 1 > deviceNameLength || + info.name.length() + 1 > deviceUniqueIdUTF8Length) { + return -1; } - else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find GetDeviceUniqueName function id", - __FUNCTION__); - result = -1; - } - - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: result %d", __FUNCTION__, (int) result); - return result; - + memcpy(deviceNameUTF8, info.name.c_str(), info.name.length() + 1); + memcpy(deviceUniqueIdUTF8, info.name.c_str(), info.name.length() + 1); + return 0; } int32_t DeviceInfoAndroid::CreateCapabilityMap( const char* deviceUniqueIdUTF8) { - for (std::map::iterator it = - _captureCapabilities.begin(); - it != _captureCapabilities.end(); - ++it) - delete it->second; _captureCapabilities.clear(); - - AutoLocalJNIFrame 
jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return -1; - - jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); - jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); - - // Find the capability class - jclass javaCapClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureCapabilityClass); - if (javaCapClass == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: SetAndroidCaptureClasses must be called first!", - __FUNCTION__); + const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8); + if (info == NULL) return -1; + + for (size_t i = 0; i < info->resolutions.size(); ++i) { + const std::pair& size = info->resolutions[i]; + VideoCaptureCapability cap; + cap.width = size.first; + cap.height = size.second; + cap.maxFPS = info->max_mfps / 1000; + cap.expectedCaptureDelay = kExpectedCaptureDelay; + cap.rawType = kVideoNV21; + _captureCapabilities.push_back(cap); } - - // get the method ID for the Android Java GetCapabilityArray . - jmethodID cid = env->GetMethodID( - javaCmDevInfoClass, - "GetCapabilityArray", - "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;"); - if (cid == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't find method GetCapabilityArray.", __FUNCTION__); - return -1; - } - // Create a jstring so we can pass the deviceUniquName to the java method. - jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); - - if (capureIdString == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't create string for method GetCapabilityArray.", - __FUNCTION__); - return -1; - } - // Call the java class and get an array with capabilities back. - jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject, - cid, capureIdString); - if (!javaCapabilitiesObj || jniFrame.CheckForException()) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to call java GetCapabilityArray.", - __FUNCTION__); - return -1; - } - - jfieldID widthField = env->GetFieldID(javaCapClass, "width", "I"); - jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I"); - jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I"); - if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to get field Id.", __FUNCTION__); - return -1; - } - - const jsize numberOfCapabilities = - env->GetArrayLength((jarray) javaCapabilitiesObj); - - for (jsize i = 0; i < numberOfCapabilities; ++i) { - VideoCaptureCapability *cap = new VideoCaptureCapability(); - jobject capabilityElement = env->GetObjectArrayElement( - (jobjectArray) javaCapabilitiesObj, - i); - - cap->width = env->GetIntField(capabilityElement, widthField); - cap->height = env->GetIntField(capabilityElement, heigtField); - cap->expectedCaptureDelay = _expectedCaptureDelay; - cap->rawType = kVideoNV21; - cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField); - WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, - "%s: Cap width %d, height %d, fps %d", __FUNCTION__, - cap->width, cap->height, cap->maxFPS); - _captureCapabilities[i] = cap; - } - - _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8); - _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName, - _lastUsedDeviceNameLength + 1); - memcpy(_lastUsedDeviceName, - deviceUniqueIdUTF8, - _lastUsedDeviceNameLength + 1); - - 
env->DeleteGlobalRef(javaCapClass); - - WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, - "CreateCapabilityMap %d", _captureCapabilities.size()); - return _captureCapabilities.size(); } int32_t DeviceInfoAndroid::GetOrientation( const char* deviceUniqueIdUTF8, VideoCaptureRotation& orientation) { - AutoLocalJNIFrame jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return -1; - - jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); - jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); - - // get the method ID for the Android Java GetOrientation . - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation", - "(Ljava/lang/String;)I"); - if (cid == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't find method GetOrientation.", __FUNCTION__); + const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8); + if (info == NULL || + !VideoCaptureImpl::RotationFromDegrees(info->orientation, &orientation)) { return -1; } - // Create a jstring so we can pass the deviceUniquName to the java method. - jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); - if (capureIdString == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't create string for method GetCapabilityArray.", - __FUNCTION__); - return -1; - } - // Call the java class and get the orientation. - jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid, - capureIdString); + return 0; +} - int32_t retValue = 0; - switch (jorientation) { - case -1: // Error - orientation = kCameraRotate0; - retValue = -1; - break; - case 0: - orientation = kCameraRotate0; - break; - case 90: - orientation = kCameraRotate90; - break; - case 180: - orientation = kCameraRotate180; - break; - case 270: - orientation = kCameraRotate270; - break; - case 360: - orientation = kCameraRotate0; - break; - } - return retValue; +void DeviceInfoAndroid::GetFpsRange(const char* deviceUniqueIdUTF8, + int* min_mfps, int* max_mfps) { + const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8); + if (info == NULL) + return; + *min_mfps = info->min_mfps; + *max_mfps = info->max_mfps; } } // namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h index 0fd938e056f7..47f4121302f2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h @@ -24,19 +24,18 @@ namespace webrtc namespace videocapturemodule { -// Android logging, uncomment to print trace to -// logcat instead of trace file/callback -// #include -// #define WEBRTC_TRACE(a,b,c,...) -// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) - class DeviceInfoAndroid : public DeviceInfoImpl { - public: - static void SetAndroidCaptureClasses(jclass capabilityClass); - DeviceInfoAndroid(const int32_t id); - int32_t Init(); + static void Initialize(JNIEnv* env); + + DeviceInfoAndroid(int32_t id); virtual ~DeviceInfoAndroid(); + + // Set |*index| to the index of the camera matching |deviceUniqueIdUTF8|, or + // return false if no match. 
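+ // For example (the device name is illustrative; any name reported by
+ // GetDeviceName() works):
+ //   size_t index;
+ //   if (info->FindCameraIndex("Camera 0, Facing back", &index)) { ... }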
+ bool FindCameraIndex(const char* deviceUniqueIdUTF8, size_t* index); + + virtual int32_t Init(); virtual uint32_t NumberOfDevices(); virtual int32_t GetDeviceName( uint32_t deviceNumber, @@ -56,9 +55,14 @@ class DeviceInfoAndroid : public DeviceInfoImpl { uint32_t /*positionY*/) { return -1; } virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8, VideoCaptureRotation& orientation); + + // Populate |min_mfps| and |max_mfps| with the supported range of the device. + void GetFpsRange(const char* deviceUniqueIdUTF8, + int* min_mfps, + int* max_mfps); + private: - bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8); - enum {_expectedCaptureDelay = 190}; + enum { kExpectedCaptureDelay = 190}; }; } // namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java index 691aa25edc24..6cefced35a6c 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java @@ -14,7 +14,11 @@ import org.mozilla.gecko.mozglue.WebRTCJNITarget; @WebRTCJNITarget public class CaptureCapabilityAndroid { - public int width = 0; - public int height = 0; - public int maxFPS = 0; + public String name; + public int width[]; + public int height[]; + public int minMilliFPS; + public int maxMilliFPS; + public boolean frontFacing; + public int orientation; } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java index 7b58b05bfa14..e08bef3b2633 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java @@ -15,9 +15,6 @@ import java.util.Locale; import java.util.List; import java.util.concurrent.locks.ReentrantLock; -import org.webrtc.videoengine.CaptureCapabilityAndroid; -import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice; - import android.graphics.ImageFormat; import android.graphics.PixelFormat; import android.graphics.Rect; @@ -29,379 +26,280 @@ import android.util.Log; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceHolder.Callback; -import android.view.SurfaceView; -import android.view.TextureView; -import android.view.TextureView.SurfaceTextureListener; -import android.view.View; -import org.mozilla.gecko.GeckoApp; import org.mozilla.gecko.GeckoAppShell; import org.mozilla.gecko.GeckoAppShell.AppStateListener; -import org.mozilla.gecko.util.ThreadUtils; import org.mozilla.gecko.mozglue.WebRTCJNITarget; +// Wrapper for android Camera, with support for direct local preview rendering. +// Threading notes: this class is called from ViE C++ code, and from Camera & +// SurfaceHolder Java callbacks. Since these calls happen on different threads, +// the entry points to this class are all synchronized. 
This shouldn't present +// a performance bottleneck because only onPreviewFrame() is called more than +// once (and is called serially on a single thread), so the lock should be +// uncontended. public class VideoCaptureAndroid implements PreviewCallback, Callback { + private final static String TAG = "WEBRTC-JC"; - private final static String TAG = "WEBRTC-JC"; + private Camera camera; // Only non-null while capturing. + private Camera.CameraInfo info = null; + private final int id; + private final long native_capturer; // |VideoCaptureAndroid*| in C++. + private SurfaceHolder localPreview; + private SurfaceTexture dummySurfaceTexture; + // Arbitrary queue depth. Higher number means more memory allocated & held, + // lower number means more sensitivity to processing time in the client (and + // potentially stalling the capturer if it runs out of buffers to write to). + private final int numCaptureBuffers = 3; + // Needed to start/stop/rotate camera. + private AppStateListener mAppStateListener = null; + private int mCaptureRotation = 0; + private int mCaptureWidth = 0; + private int mCaptureHeight = 0; + private int mCaptureMinFPS = 0; + private int mCaptureMaxFPS = 0; + // Are we being told to start/stop the camera, or just suspending/resuming + // due to the application being backgrounded. + private boolean mResumeCapture = false; - private Camera camera; - private int cameraId; - private AndroidVideoCaptureDevice currentDevice = null; - public ReentrantLock previewBufferLock = new ReentrantLock(); - // This lock takes sync with StartCapture and SurfaceChanged - private ReentrantLock captureLock = new ReentrantLock(); - private int PIXEL_FORMAT = ImageFormat.NV21; - PixelFormat pixelFormat = new PixelFormat(); - // True when the C++ layer has ordered the camera to be started. - private boolean isCaptureStarted = false; - private boolean isCaptureRunning = false; - private boolean isSurfaceReady = false; - private SurfaceHolder surfaceHolder = null; - private SurfaceTexture surfaceTexture = null; - private SurfaceTexture dummySurfaceTexture = null; - - private final int numCaptureBuffers = 3; - private int expectedFrameSize = 0; - private int orientation = 0; - private int id = 0; - // C++ callback context variable. - private long context = 0; - private SurfaceHolder localPreview = null; - // True if this class owns the preview video buffers. - private boolean ownsBuffers = false; - - private int mCaptureWidth = -1; - private int mCaptureHeight = -1; - private int mCaptureFPS = -1; - - private int mCaptureRotation = 0; - - private AppStateListener mAppStateListener = null; - - public class MySurfaceTextureListener implements TextureView.SurfaceTextureListener { - public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { - Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureAvailable"); - - captureLock.lock(); - isSurfaceReady = true; - surfaceTexture = surface; - - tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); - captureLock.unlock(); - } - - public void onSurfaceTextureSizeChanged(SurfaceTexture surface, - int width, int height) { - // Ignored, Camera does all the work for us - // Note that for a TextureView we start on onSurfaceTextureAvailable, - // for a SurfaceView we start on surfaceChanged. TextureView - // will not give out an onSurfaceTextureSizeChanged during creation. 
- } - - public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { - Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureDestroyed"); - isSurfaceReady = false; - DetachCamera(); - return true; - } - - public void onSurfaceTextureUpdated(SurfaceTexture surface) { - // Invoked every time there's a new Camera preview frame - } + @WebRTCJNITarget + public VideoCaptureAndroid(int id, long native_capturer) { + this.id = id; + this.native_capturer = native_capturer; + if(android.os.Build.VERSION.SDK_INT>8) { + this.info = new Camera.CameraInfo(); + Camera.getCameraInfo(id, info); } + mCaptureRotation = GetRotateAmount(); + } - @WebRTCJNITarget - public static - void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) { - Log.d(TAG, "DeleteVideoCaptureAndroid"); - - GeckoAppShell.getGeckoInterface().removeAppStateListener(captureAndroid.mAppStateListener); - - captureAndroid.StopCapture(); - if (captureAndroid.camera != null) { - captureAndroid.camera.release(); - captureAndroid.camera = null; + private void LinkAppStateListener() { + mAppStateListener = new AppStateListener() { + @Override + public void onPause() { + if (camera != null) { + mResumeCapture = true; + stopCapture(); } - captureAndroid.context = 0; - - View cameraView = GeckoAppShell.getGeckoInterface().getCameraView(); - if (cameraView instanceof SurfaceView) { - ((SurfaceView)cameraView).getHolder().removeCallback(captureAndroid); - } else if (cameraView instanceof TextureView) { - // No need to explicitly remove the Listener: - // i.e. ((SurfaceView)cameraView).setSurfaceTextureListener(null); + } + @Override + public void onResume() { + if (mResumeCapture) { + startCapture(mCaptureWidth, mCaptureHeight, mCaptureMinFPS, mCaptureMaxFPS); + mResumeCapture = false; } - ThreadUtils.getUiHandler().post(new Runnable() { - @Override - public void run() { - try { - GeckoAppShell.getGeckoInterface().disableCameraView(); - } catch (Exception e) { - Log.e(TAG, - "VideoCaptureAndroid disableCameraView exception: " + - e.getLocalizedMessage()); - } - } - }); - } - - public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera, - AndroidVideoCaptureDevice in_device, - int in_cameraId) { - id = in_id; - context = in_context; - camera = in_camera; - cameraId = in_cameraId; - currentDevice = in_device; + } + @Override + public void onOrientationChanged() { mCaptureRotation = GetRotateAmount(); + } + }; + GeckoAppShell.getGeckoInterface().addAppStateListener(mAppStateListener); + } - try { - View cameraView = GeckoAppShell.getGeckoInterface().getCameraView(); - if (cameraView instanceof SurfaceView) { - ((SurfaceView)cameraView).getHolder().addCallback(this); - } else if (cameraView instanceof TextureView) { - MySurfaceTextureListener listener = new MySurfaceTextureListener(); - ((TextureView)cameraView).setSurfaceTextureListener(listener); - } - ThreadUtils.getUiHandler().post(new Runnable() { - @Override - public void run() { - try { - GeckoAppShell.getGeckoInterface().enableCameraView(); - } catch (Exception e) { - Log.e(TAG, - "VideoCaptureAndroid enableCameraView exception: " - + e.getLocalizedMessage()); - } - } - }); - } catch (Exception ex) { - Log.e(TAG, "VideoCaptureAndroid constructor exception: " + - ex.getLocalizedMessage()); - } + private void RemoveAppStateListener() { + GeckoAppShell.getGeckoInterface().removeAppStateListener(mAppStateListener); + } - mAppStateListener = new AppStateListener() { - @Override - public void onPause() { - StopCapture(); - if (camera != null) { - camera.release(); - camera = 
null; - } - } - @Override - public void onResume() { - try { - if(android.os.Build.VERSION.SDK_INT>8) { - camera = Camera.open(cameraId); - } else { - camera = Camera.open(); - } - } catch (Exception ex) { - Log.e(TAG, "Error reopening to the camera: " + ex.getMessage()); - } - captureLock.lock(); - isCaptureStarted = true; - tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); - captureLock.unlock(); - } - @Override - public void onOrientationChanged() { - mCaptureRotation = GetRotateAmount(); - } - }; - - GeckoAppShell.getGeckoInterface().addAppStateListener(mAppStateListener); + public int GetRotateAmount() { + int rotation = GeckoAppShell.getGeckoInterface().getActivity().getWindowManager().getDefaultDisplay().getRotation(); + int degrees = 0; + switch (rotation) { + case Surface.ROTATION_0: degrees = 0; break; + case Surface.ROTATION_90: degrees = 90; break; + case Surface.ROTATION_180: degrees = 180; break; + case Surface.ROTATION_270: degrees = 270; break; } + if(android.os.Build.VERSION.SDK_INT>8) { + int result; + if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { + result = (info.orientation + degrees) % 360; + } else { // back-facing + result = (info.orientation - degrees + 360) % 360; + } + return result; + } else { + // Assume 90deg orientation for Froyo devices. + // Only back-facing cameras are supported in Froyo. + int orientation = 90; + int result = (orientation - degrees + 360) % 360; + return result; + } + } - public int GetRotateAmount() { - int rotation = GeckoAppShell.getGeckoInterface().getActivity().getWindowManager().getDefaultDisplay().getRotation(); - int degrees = 0; - switch (rotation) { - case Surface.ROTATION_0: degrees = 0; break; - case Surface.ROTATION_90: degrees = 90; break; - case Surface.ROTATION_180: degrees = 180; break; - case Surface.ROTATION_270: degrees = 270; break; + // Called by native code. Returns true if capturer is started. + // + // Note that this actually opens the camera, which can be a slow operation and + // thus might be done on a background thread, but ViE API needs a + // synchronous success return value so we can't do that. + @WebRTCJNITarget + private synchronized boolean startCapture( + int width, int height, int min_mfps, int max_mfps) { + Log.d(TAG, "startCapture: " + width + "x" + height + "@" + + min_mfps + ":" + max_mfps); + if (!mResumeCapture) { + ViERenderer.CreateLocalRenderer(); + } + Throwable error = null; + try { + if(android.os.Build.VERSION.SDK_INT>8) { + camera = Camera.open(id); + } else { + camera = Camera.open(); + } + + localPreview = ViERenderer.GetLocalRenderer(); + if (localPreview != null) { + localPreview.addCallback(this); + if (localPreview.getSurface() != null && + localPreview.getSurface().isValid()) { + camera.setPreviewDisplay(localPreview); } - if(android.os.Build.VERSION.SDK_INT>8) { - android.hardware.Camera.CameraInfo info = - new android.hardware.Camera.CameraInfo(); - android.hardware.Camera.getCameraInfo(cameraId, info); - int result; - if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { - result = (info.orientation + degrees) % 360; - } else { // back-facing - result = (info.orientation - degrees + 360) % 360; - } - return result; + } else { + if(android.os.Build.VERSION.SDK_INT>10) { + // No local renderer (we only care about onPreviewFrame() buffers, not a + // directly-displayed UI element). 
Camera won't capture without + // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and + // hand it over to Camera, but never listen for frame-ready callbacks, + // and never call updateTexImage on it. + try { + // "42" because http://goo.gl/KaEn8 + dummySurfaceTexture = new SurfaceTexture(42); + camera.setPreviewTexture(dummySurfaceTexture); + } catch (IOException e) { + throw new RuntimeException(e); + } } else { - // Assume 90deg orientation for Froyo devices. - // Only back-facing cameras are supported in Froyo. - int orientation = 90; - int result = (orientation - degrees + 360) % 360; - return result; + throw new RuntimeException("No preview surface for Camera."); } - } + } - private int tryStartCapture(int width, int height, int frameRate) { - if (camera == null) { - Log.e(TAG, "Camera not initialized %d" + id); - return -1; + Camera.Parameters parameters = camera.getParameters(); + // This wasn't added until ICS MR1. + if(android.os.Build.VERSION.SDK_INT>14) { + Log.d(TAG, "isVideoStabilizationSupported: " + + parameters.isVideoStabilizationSupported()); + if (parameters.isVideoStabilizationSupported()) { + parameters.setVideoStabilization(true); } + } + List focusModeList = parameters.getSupportedFocusModes(); + if (focusModeList.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + } + parameters.setPreviewSize(width, height); + if (android.os.Build.VERSION.SDK_INT>8) { + parameters.setPreviewFpsRange(min_mfps, max_mfps); + } else { + parameters.setPreviewFrameRate(max_mfps / 1000); + } + int format = ImageFormat.NV21; + parameters.setPreviewFormat(format); + camera.setParameters(parameters); + int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; + for (int i = 0; i < numCaptureBuffers; i++) { + camera.addCallbackBuffer(new byte[bufSize]); + } + camera.setPreviewCallbackWithBuffer(this); + camera.startPreview(); + // Remember parameters we were started with. + mCaptureWidth = width; + mCaptureHeight = height; + mCaptureMinFPS = min_mfps; + mCaptureMaxFPS = max_mfps; + // If we are resuming a paused capture, the listener is already active. + if (!mResumeCapture) { + LinkAppStateListener(); + } + return true; + } catch (IOException e) { + error = e; + } catch (RuntimeException e) { + error = e; + } + Log.e(TAG, "startCapture failed", error); + if (camera != null) { + stopCapture(); + } + return false; + } - Log.d(TAG, "tryStartCapture " + width + - " height " + height +" frame rate " + frameRate + - " isCaptureRunning " + isCaptureRunning + - " isSurfaceReady " + isSurfaceReady + - " isCaptureStarted " + isCaptureStarted); - - if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) { - return 0; + // Called by native code. Returns true when camera is known to be stopped. 
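
startCapture() above sizes its callback buffers from the NV21 bit depth and passes frame rates in the Android API's "milli-fps" convention (fps × 1000, as consumed by setPreviewFpsRange()). The same arithmetic as a tiny standalone sketch, with the constant inlined in place of ImageFormat.getBitsPerPixel() so it runs on a plain JDK (the names are mine, not the patch's):

```java
public final class PreviewBufferMath {
    // 12 == ImageFormat.getBitsPerPixel(ImageFormat.NV21): YUV 4:2:0,
    // one full-resolution Y plane plus quarter-resolution U and V.
    static final int NV21_BITS_PER_PIXEL = 12;

    static int nv21BufferSize(int width, int height) {
        return width * height * NV21_BITS_PER_PIXEL / 8;
    }

    // Camera.Parameters.setPreviewFpsRange() takes "milli-fps": fps * 1000.
    static int toMilliFps(int fps) {
        return fps * 1000;
    }

    public static void main(String[] args) {
        System.out.println(nv21BufferSize(640, 480)); // 460800 bytes per buffer
        System.out.println(toMilliFps(30));           // 30000
    }
}
```
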
+ @WebRTCJNITarget + private synchronized boolean stopCapture() { + Log.d(TAG, "stopCapture"); + if (camera == null) { + throw new RuntimeException("Camera is already stopped!"); + } + Throwable error = null; + try { + camera.setPreviewCallbackWithBuffer(null); + camera.stopPreview(); + if (localPreview != null) { + localPreview.removeCallback(this); + camera.setPreviewDisplay(null); + } else { + if(android.os.Build.VERSION.SDK_INT>10) { + camera.setPreviewTexture(null); } - - try { - if (surfaceHolder != null) - camera.setPreviewDisplay(surfaceHolder); - if (surfaceTexture != null) - camera.setPreviewTexture(surfaceTexture); - if (surfaceHolder == null && surfaceTexture == null) { - // No local renderer. Camera won't capture without - // setPreview{Texture,Display}, so we create a dummy SurfaceTexture - // and hand it over to Camera, but never listen for frame-ready - // callbacks, and never call updateTexImage on it. - try { - dummySurfaceTexture = new SurfaceTexture(42); - camera.setPreviewTexture(dummySurfaceTexture); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - CaptureCapabilityAndroid currentCapability = - new CaptureCapabilityAndroid(); - currentCapability.width = width; - currentCapability.height = height; - currentCapability.maxFPS = frameRate; - PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat); - - - Camera.Parameters parameters = camera.getParameters(); - - List focusModeList = parameters.getSupportedFocusModes(); - if (focusModeList.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { - parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); - } - - parameters.setPreviewSize(currentCapability.width, - currentCapability.height); - parameters.setPreviewFormat(PIXEL_FORMAT); - parameters.setPreviewFrameRate(currentCapability.maxFPS); - camera.setParameters(parameters); - - int bufSize = width * height * pixelFormat.bitsPerPixel / 8; - byte[] buffer = null; - for (int i = 0; i < numCaptureBuffers; i++) { - buffer = new byte[bufSize]; - camera.addCallbackBuffer(buffer); - } - camera.setPreviewCallbackWithBuffer(this); - ownsBuffers = true; - - camera.startPreview(); - previewBufferLock.lock(); - expectedFrameSize = bufSize; - isCaptureRunning = true; - previewBufferLock.unlock(); - - } - catch (Exception ex) { - Log.e(TAG, "Failed to start camera: " + ex.getMessage()); - return -1; - } - - isCaptureRunning = true; - return 0; + } + camera.release(); + camera = null; + // If we want to resume after onResume, keep the listener in place. 
+ if (!mResumeCapture) { + RemoveAppStateListener(); + ViERenderer.DestroyLocalRenderer(); + } + return true; + } catch (IOException e) { + error = e; + } catch (RuntimeException e) { + error = e; } + Log.e(TAG, "Failed to stop camera", error); + return false; + } - @WebRTCJNITarget - public int StartCapture(int width, int height, int frameRate) { - Log.d(TAG, "StartCapture width " + width + - " height " + height +" frame rate " + frameRate); - captureLock.lock(); - isCaptureStarted = true; - mCaptureWidth = width; - mCaptureHeight = height; - mCaptureFPS = frameRate; + @WebRTCJNITarget + private native void ProvideCameraFrame( + byte[] data, int length, long captureObject, int rotation); - int res = tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); - - captureLock.unlock(); - return res; + @WebRTCJNITarget + public synchronized void onPreviewFrame(byte[] data, Camera camera) { + if (data != null) { + ProvideCameraFrame(data, data.length, native_capturer, mCaptureRotation); + camera.addCallbackBuffer(data); } + } - public int DetachCamera() { - try { - previewBufferLock.lock(); - isCaptureRunning = false; - previewBufferLock.unlock(); - if (camera != null) { - camera.setPreviewCallbackWithBuffer(null); - camera.stopPreview(); - } - } catch (Exception ex) { - Log.e(TAG, "Failed to stop camera: " + ex.getMessage()); - return -1; - } - return 0; + @WebRTCJNITarget + public synchronized void surfaceChanged( + SurfaceHolder holder, int format, int width, int height) { + Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " + + format + ": " + width + "x" + height); + } + + @WebRTCJNITarget + public synchronized void surfaceCreated(SurfaceHolder holder) { + Log.d(TAG, "VideoCaptureAndroid::surfaceCreated"); + try { + if (camera != null) { + camera.setPreviewDisplay(holder); + } + } catch (IOException e) { + throw new RuntimeException(e); } + } - public int StopCapture() { - Log.d(TAG, "StopCapture"); - isCaptureStarted = false; - return DetachCamera(); - } - - native void ProvideCameraFrame(byte[] data, int length, int rotation, - long captureObject); - - public void onPreviewFrame(byte[] data, Camera camera) { - previewBufferLock.lock(); - - if (isCaptureRunning) { - // If StartCapture has been called but not StopCapture - // Call the C++ layer with the captured frame - if (data != null && data.length == expectedFrameSize) { - ProvideCameraFrame(data, expectedFrameSize, mCaptureRotation, - context); - if (ownsBuffers) { - // Give the video buffer to the camera service again. 
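
The new onPreviewFrame() above hands each buffer to native code synchronously and immediately re-queues the same array, so capture runs on the fixed pool of numCaptureBuffers arrays allocated in startCapture(), with no per-frame allocation. A stripped-down sketch of that recycling pattern, with a plain FrameSink interface standing in for the native ProvideCameraFrame() call (FrameSink and the class name are illustrative, not part of the patch):

```java
import android.hardware.Camera;

// Sketch of the fixed-pool preview callback pattern used above.
// FrameSink stands in for the native ProvideCameraFrame() handoff.
public class RecyclingPreviewCallback implements Camera.PreviewCallback {
    public interface FrameSink {
        void onFrame(byte[] data, int length, int rotationDegrees);
    }

    private final FrameSink sink;
    private final int rotationDegrees;

    public RecyclingPreviewCallback(FrameSink sink, int rotationDegrees) {
        this.sink = sink;
        this.rotationDegrees = rotationDegrees;
    }

    public static void installBuffers(Camera camera, Camera.PreviewCallback cb,
                                      int bufferSize, int numBuffers) {
        // A small fixed pool: the camera fills one queued buffer per frame
        // and drops frames when none is queued, so nothing is allocated on
        // the capture path once preview is running.
        for (int i = 0; i < numBuffers; i++) {
            camera.addCallbackBuffer(new byte[bufferSize]);
        }
        camera.setPreviewCallbackWithBuffer(cb);
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (data == null) {
            return;
        }
        // The sink must be done with the bytes before this returns, because
        // the very same array is immediately handed back to the camera.
        sink.onFrame(data, data.length, rotationDegrees);
        camera.addCallbackBuffer(data);
    }
}
```
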
- camera.addCallbackBuffer(data); - } - } - } - previewBufferLock.unlock(); - } - - public void surfaceChanged(SurfaceHolder holder, - int format, int width, int height) { - Log.d(TAG, "VideoCaptureAndroid::surfaceChanged"); - - captureLock.lock(); - isSurfaceReady = true; - surfaceHolder = holder; - - tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); - captureLock.unlock(); - return; - } - - public void surfaceCreated(SurfaceHolder holder) { - Log.d(TAG, "VideoCaptureAndroid::surfaceCreated"); - } - - public void surfaceDestroyed(SurfaceHolder holder) { - Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed"); - isSurfaceReady = false; - DetachCamera(); + @WebRTCJNITarget + public synchronized void surfaceDestroyed(SurfaceHolder holder) { + Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed"); + try { + if (camera != null) { + camera.setPreviewDisplay(null); + } + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java index 62535ab51154..33d64dfe5b66 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java @@ -17,390 +17,125 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import dalvik.system.DexClassLoader; - import android.content.Context; -import android.hardware.Camera; +import android.hardware.Camera.CameraInfo; +import android.hardware.Camera.Parameters; import android.hardware.Camera.Size; +import android.hardware.Camera; import android.util.Log; import org.mozilla.gecko.mozglue.WebRTCJNITarget; public class VideoCaptureDeviceInfoAndroid { + private final static String TAG = "WEBRTC-JC"; - //Context - Context context; + private static boolean isFrontFacing(CameraInfo info) { + return info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT; + } - // Set VERBOSE as the default logging level because camera device info - // is very useful information and doesn't degrade performance normally - private final static String TAG = "WEBRTC"; + private static String deviceUniqueName(int index, CameraInfo info) { + return "Camera " + index +", Facing " + + (isFrontFacing(info) ? "front" : "back") + + ", Orientation "+ info.orientation; + } - // Private class with info about all available cameras and the capabilities - @WebRTCJNITarget - public class AndroidVideoCaptureDevice { - AndroidVideoCaptureDevice() { - frontCameraType = FrontFacingCameraType.None; - index = 0; - } + // Returns information about all cameras on the device. + // Since this reflects static information about the hardware present, there is + // no need to call this function more than once in a single process. It is + // marked "private" as it is only called by native code. 
+ @WebRTCJNITarget + private static CaptureCapabilityAndroid[] getDeviceInfo() { + ArrayList allDevices = new ArrayList(); + int numCameras = 1; + if (android.os.Build.VERSION.SDK_INT >= 9) { + numCameras = Camera.getNumberOfCameras(); + } + for (int i = 0; i < numCameras; ++i) { + String uniqueName = null; + CameraInfo info = null; + if (android.os.Build.VERSION.SDK_INT >= 9) { + info = new CameraInfo(); + Camera.getCameraInfo(i, info); + uniqueName = deviceUniqueName(i, info); + } else { + uniqueName = "Camera 0, Facing back, Orientation 90"; + } - public String deviceUniqueName; - public CaptureCapabilityAndroid captureCapabilies[]; - public FrontFacingCameraType frontCameraType; + List supportedSizes = null; + List supportedFpsRanges = null; + try { + Camera camera = null; + if (android.os.Build.VERSION.SDK_INT >= 9) { + camera = Camera.open(i); + } else { + camera = Camera.open(); + } + Parameters parameters = camera.getParameters(); + supportedSizes = parameters.getSupportedPreviewSizes(); + if (android.os.Build.VERSION.SDK_INT >= 9) { + supportedFpsRanges = parameters.getSupportedPreviewFpsRange(); + } + // getSupportedPreviewFpsRange doesn't actually work on a bunch + // of Gingerbread devices. + if (supportedFpsRanges == null) { + supportedFpsRanges = new ArrayList(); + List frameRates = parameters.getSupportedPreviewFrameRates(); + if (frameRates != null) { + for (Integer rate: frameRates) { + int[] range = new int[2]; + // minFPS = maxFPS, convert to milliFPS + range[0] = rate * 1000; + range[1] = rate * 1000; + supportedFpsRanges.add(range); + } + } else { + Log.e(TAG, "Camera doesn't know its own framerate, guessing 25fps."); + int[] range = new int[2]; + // Your guess is as good as mine + range[0] = 25 * 1000; + range[1] = 25 * 1000; + supportedFpsRanges.add(range); + } + } + camera.release(); + Log.d(TAG, uniqueName); + } catch (RuntimeException e) { + Log.e(TAG, "Failed to open " + uniqueName + ", skipping due to: " + + e.getLocalizedMessage()); + continue; + } - // Orientation of camera as described in - // android.hardware.Camera.CameraInfo.Orientation - public int orientation; - // Camera index used in Camera.Open on Android 2.3 and onwards - public int index; - } + CaptureCapabilityAndroid device = new CaptureCapabilityAndroid(); - public enum FrontFacingCameraType { - None, // This is not a front facing camera - GalaxyS, // Galaxy S front facing camera. - HTCEvo, // HTC Evo front facing camera - Android23, // Android 2.3 front facing camera. - } + int sizeLen = supportedSizes.size(); + device.width = new int[sizeLen]; + device.height = new int[sizeLen]; - String currentDeviceUniqueId; - int id; - List deviceList; - - @WebRTCJNITarget - public static VideoCaptureDeviceInfoAndroid - CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) { - Log.d(TAG, - String.format(Locale.US, "VideoCaptureDeviceInfoAndroid")); - - VideoCaptureDeviceInfoAndroid self = - new VideoCaptureDeviceInfoAndroid(in_id, in_context); - if(self != null && self.Init() == 0) { - return self; - } - else { - Log.d(TAG, "Failed to create VideoCaptureDeviceInfoAndroid."); - } - return null; - } - - private VideoCaptureDeviceInfoAndroid(int in_id, - Context in_context) { - id = in_id; - context = in_context; - deviceList = new ArrayList(); - } - - private int Init() { - // Populate the deviceList with available cameras and their capabilities. 
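
getDeviceInfo() above has to normalize three generations of camera API into one shape: real ranges where getSupportedPreviewFpsRange() works, degenerate min==max ranges synthesized from getSupportedPreviewFrameRates(), and a flat 25 fps guess when the driver reports nothing at all. That normalization, hedged as a standalone sketch (normalizeFpsRanges is my name for it, not the patch's):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the fps-range normalization in getDeviceInfo(): every camera
// ends up with a list of [minMilliFps, maxMilliFps] pairs, whatever the
// device actually reports.
public final class FpsRanges {
    static final int GUESSED_FPS = 25; // same fallback guess as the patch

    static List<int[]> normalizeFpsRanges(List<int[]> fpsRanges,
                                          List<Integer> frameRates) {
        if (fpsRanges != null) {
            return fpsRanges; // modern path: ranges already in milli-fps
        }
        List<int[]> result = new ArrayList<int[]>();
        if (frameRates != null) {
            for (Integer rate : frameRates) {
                // Integral frame rates become degenerate ranges, min == max.
                result.add(new int[] { rate * 1000, rate * 1000 });
            }
        } else {
            // Camera doesn't know its own framerate; guess 25 fps.
            result.add(new int[] { GUESSED_FPS * 1000, GUESSED_FPS * 1000 });
        }
        return result;
    }

    public static void main(String[] args) {
        List<Integer> rates = new ArrayList<Integer>();
        rates.add(15);
        rates.add(30);
        for (int[] r : normalizeFpsRanges(null, rates)) {
            System.out.println(r[0] + ".." + r[1]); // 15000..15000, 30000..30000
        }
    }
}
```
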
- Camera camera = null; - if(android.os.Build.VERSION.SDK_INT > 8) { - // From Android 2.3 and onwards - for(int i = 0; i < Camera.getNumberOfCameras(); ++i) { - AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice(); - - Camera.CameraInfo info = new Camera.CameraInfo(); - Camera.getCameraInfo(i, info); - newDevice.index = i; - newDevice.orientation=info.orientation; - if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { - newDevice.deviceUniqueName = - "Camera " + i +", Facing back, Orientation "+ info.orientation; - Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation); - - } - else { - newDevice.deviceUniqueName = - "Camera " + i +", Facing front, Orientation "+ info.orientation; - newDevice.frontCameraType = FrontFacingCameraType.Android23; - Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation); - } - - camera = Camera.open(i); - Camera.Parameters parameters = camera.getParameters(); - AddDeviceInfo(newDevice, parameters); - camera.release(); - camera = null; - deviceList.add(newDevice); - } - } - VerifyCapabilities(); - return 0; - } - - // Adds the capture capabilities of the currently opened device - private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice, - Camera.Parameters parameters) { - - List sizes = parameters.getSupportedPreviewSizes(); - List frameRates = parameters.getSupportedPreviewFrameRates(); - int maxFPS = 0; - if (frameRates != null) { - for(Integer frameRate:frameRates) { - if(frameRate > maxFPS) { - maxFPS = frameRate; - } - } - } - - newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()]; - for(int i = 0; i < sizes.size(); ++i) { - Size s = sizes.get(i); - newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid(); - newDevice.captureCapabilies[i].height = s.height; - newDevice.captureCapabilies[i].width = s.width; - newDevice.captureCapabilies[i].maxFPS = maxFPS; - Log.v(TAG, "VideoCaptureDeviceInfo " + ", maxFPS: " + maxFPS + - ", width: " + s.width + ", height: " + s.height); - } - } - - // Function that make sure device specific capabilities are - // in the capability list. - // Ie Galaxy S supports CIF but does not list CIF as a supported capability. - // Motorola Droid Camera does not work with frame rate above 15fps. 
- // http://code.google.com/p/android/issues/detail?id=5514#c0 - private void VerifyCapabilities() { - // Nexus S or Galaxy S - if(android.os.Build.DEVICE.equals("GT-I9000") || - android.os.Build.DEVICE.equals("crespo")) { - CaptureCapabilityAndroid specificCapability = - new CaptureCapabilityAndroid(); - specificCapability.width = 352; - specificCapability.height = 288; - specificCapability.maxFPS = 15; - AddDeviceSpecificCapability(specificCapability); - - specificCapability = new CaptureCapabilityAndroid(); - specificCapability.width = 176; - specificCapability.height = 144; - specificCapability.maxFPS = 15; - AddDeviceSpecificCapability(specificCapability); - - specificCapability = new CaptureCapabilityAndroid(); - specificCapability.width = 320; - specificCapability.height = 240; - specificCapability.maxFPS = 15; - AddDeviceSpecificCapability(specificCapability); - } - // Motorola Milestone Camera server does not work at 30fps - // even though it reports that it can - if(android.os.Build.MANUFACTURER.equals("motorola") && - android.os.Build.DEVICE.equals("umts_sholes")) { - for (AndroidVideoCaptureDevice device : deviceList) { - for (CaptureCapabilityAndroid capability : device.captureCapabilies) { - capability.maxFPS = 15; - } - } - } - } - - private void AddDeviceSpecificCapability( - CaptureCapabilityAndroid specificCapability) { - for(AndroidVideoCaptureDevice device:deviceList) { - boolean foundCapability = false; - for(CaptureCapabilityAndroid capability:device.captureCapabilies) { - if(capability.width == specificCapability.width && - capability.height == specificCapability.height) { - foundCapability = true; - break; - } - } - if(foundCapability==false) { - CaptureCapabilityAndroid newCaptureCapabilies[]= - new CaptureCapabilityAndroid[device.captureCapabilies.length+1]; - for(int i = 0; i < device.captureCapabilies.length; ++i) { - newCaptureCapabilies[i+1] = device.captureCapabilies[i]; - } - newCaptureCapabilies[0] = specificCapability; - device.captureCapabilies = newCaptureCapabilies; - } - } - } - - // Returns the number of Capture devices that is supported - @WebRTCJNITarget - public int NumberOfDevices() { - return deviceList.size(); - } - - @WebRTCJNITarget - public String GetDeviceUniqueName(int deviceNumber) { - if(deviceNumber < 0 || deviceNumber >= deviceList.size()) { - return null; - } - return deviceList.get(deviceNumber).deviceUniqueName; - } - - @WebRTCJNITarget - public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId) - { - for (AndroidVideoCaptureDevice device: deviceList) { - if(device.deviceUniqueName.equals(deviceUniqueId)) { - return (CaptureCapabilityAndroid[]) device.captureCapabilies; - } - } - return null; - } - - // Returns the camera orientation as described by - // android.hardware.Camera.CameraInfo.orientation - @WebRTCJNITarget - public int GetOrientation(String deviceUniqueId) { - for (AndroidVideoCaptureDevice device: deviceList) { - if(device.deviceUniqueName.equals(deviceUniqueId)) { - return device.orientation; - } - } - return -1; - } - - // Returns an instance of VideoCaptureAndroid. 
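
The deleted VerifyCapabilities()/AddDeviceSpecificCapability() pair above implemented a per-device quirk table: known-good modes were prepended for handsets that under-reported them (CIF on the Galaxy S), and over-optimistic frame rates were clamped (Motorola Milestone). A compact sketch of that injection pattern, with a minimal Capability type standing in for CaptureCapabilityAndroid (both names in this sketch are illustrative):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the per-device quirk pattern this patch deletes: known-good
// capabilities were injected for devices that under-reported them.
public final class QuirkTable {
    static final class Capability {
        final int width, height, maxFps;
        Capability(int w, int h, int fps) { width = w; height = h; maxFps = fps; }
    }

    // Prepend a capability unless an equal-sized one is already listed,
    // mirroring the removed AddDeviceSpecificCapability().
    static List<Capability> withQuirk(List<Capability> reported, Capability quirk) {
        for (Capability c : reported) {
            if (c.width == quirk.width && c.height == quirk.height) {
                return reported; // already present, nothing to add
            }
        }
        List<Capability> fixed = new ArrayList<Capability>();
        fixed.add(quirk);
        fixed.addAll(reported);
        return fixed;
    }

    public static void main(String[] args) {
        // e.g. the removed Galaxy S workaround: force-list CIF at 15 fps.
        List<Capability> caps = withQuirk(
            new ArrayList<Capability>(), new Capability(352, 288, 15));
        System.out.println(caps.get(0).width + "x" + caps.get(0).height);
    }
}
```
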
- @WebRTCJNITarget - public VideoCaptureAndroid AllocateCamera(int id, long context, - String deviceUniqueId) { - try { - Log.d(TAG, "AllocateCamera " + deviceUniqueId); - - Camera camera = null; - int cameraId = 0; - AndroidVideoCaptureDevice deviceToUse = null; - for (AndroidVideoCaptureDevice device: deviceList) { - if(device.deviceUniqueName.equals(deviceUniqueId)) { - // Found the wanted camera - deviceToUse = device; - switch(device.frontCameraType) { - case GalaxyS: - camera = AllocateGalaxySFrontCamera(); - break; - case HTCEvo: - camera = AllocateEVOFrontFacingCamera(); - break; - default: - // From Android 2.3 and onwards) - if(android.os.Build.VERSION.SDK_INT>8) { - cameraId = device.index; - camera = Camera.open(device.index); - } else { - camera = Camera.open(); // Default_ camera - } - } - } - } - - if(camera == null) { - return null; - } - Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid"); - - return new VideoCaptureAndroid(id, context, camera, deviceToUse, cameraId); - } catch (NoSuchMethodException e) { - Log.e(TAG, "AllocateCamera Failed to open camera", e); - } catch (ClassNotFoundException e) { - Log.e(TAG, "AllocateCamera Failed to open camera", e); - } catch (InvocationTargetException e) { - Log.e(TAG, "AllocateCamera Failed to open camera", e); - } catch (IllegalAccessException e) { - Log.e(TAG, "AllocateCamera Failed to open camera", e); - } - return null; - } - - // Searches for a front facing camera device. This is device specific code. - @WebRTCJNITarget - private Camera.Parameters - SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice) - throws SecurityException, IllegalArgumentException, - NoSuchMethodException, ClassNotFoundException, - IllegalAccessException, InvocationTargetException { - // Check the id of the opened camera device - // Returns null on X10 and 1 on Samsung Galaxy S. - Camera camera = Camera.open(); - Camera.Parameters parameters = camera.getParameters(); - String cameraId = parameters.get("camera-id"); - if(cameraId != null && cameraId.equals("1")) { - // This might be a Samsung Galaxy S with a front facing camera. - parameters.set("camera-id", 2); - camera.setParameters(parameters); - parameters = camera.getParameters(); - newDevice.frontCameraType = FrontFacingCameraType.GalaxyS; - newDevice.orientation = 0; - camera.release(); - return parameters; - } - camera.release(); - - // Check for Evo front facing camera - File file = - new File("/system/framework/com.htc.hardware.twinCamDevice.jar"); - boolean exists = file.exists(); - if (!exists) { - file = - new File("/system/framework/com.sprint.hardware.twinCamDevice.jar"); - exists = file.exists(); - } - if(exists) { - newDevice.frontCameraType = FrontFacingCameraType.HTCEvo; - newDevice.orientation = 0; - Camera evCamera = AllocateEVOFrontFacingCamera(); - parameters = evCamera.getParameters(); - evCamera.release(); - return parameters; - } - return null; - } - - // Returns a handle to HTC front facing camera. - // The caller is responsible to release it on completion. 
- private Camera AllocateEVOFrontFacingCamera() - throws SecurityException, NoSuchMethodException, - ClassNotFoundException, IllegalArgumentException, - IllegalAccessException, InvocationTargetException { - String classPath = null; - File file = - new File("/system/framework/com.htc.hardware.twinCamDevice.jar"); - classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera"; - boolean exists = file.exists(); - if (!exists){ - file = - new File("/system/framework/com.sprint.hardware.twinCamDevice.jar"); - classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera"; - exists = file.exists(); - } - if(!exists) { - return null; - } - - String dexOutputDir = ""; - if(context != null) { - dexOutputDir = context.getFilesDir().getAbsolutePath(); - File mFilesDir = new File(dexOutputDir, "dexfiles"); - if(!mFilesDir.exists()){ - // Log.e("*WEBRTCN*", "Directory doesn't exists"); - if(!mFilesDir.mkdirs()) { - // Log.e("*WEBRTCN*", "Unable to create files directory"); - } - } - } - - dexOutputDir += "/dexfiles"; - - DexClassLoader loader = - new DexClassLoader(file.getAbsolutePath(), dexOutputDir, - null, ClassLoader.getSystemClassLoader()); - - Method method = loader.loadClass(classPath).getDeclaredMethod( - "getFrontFacingCamera", (Class[]) null); - Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null); - return camera; - } - - // Returns a handle to Galaxy S front camera. - // The caller is responsible to release it on completion. - private Camera AllocateGalaxySFrontCamera() { - Camera camera = Camera.open(); - Camera.Parameters parameters = camera.getParameters(); - parameters.set("camera-id",2); - camera.setParameters(parameters); - return camera; - } + int j = 0; + for (Size size : supportedSizes) { + device.width[j] = size.width; + device.height[j] = size.height; + j++; + } + // Android SDK deals in integral "milliframes per second" + // (i.e. fps*1000, instead of floating-point frames-per-second) so we + // preserve that through the Java->C++->Java round-trip. + int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1); + device.name = uniqueName; + if (android.os.Build.VERSION.SDK_INT >= 9) { + device.frontFacing = isFrontFacing(info); + device.orientation = info.orientation; + device.minMilliFPS = mfps[Parameters.PREVIEW_FPS_MIN_INDEX]; + device.maxMilliFPS = mfps[Parameters.PREVIEW_FPS_MAX_INDEX]; + } else { + device.frontFacing = false; + device.orientation = 90; + device.minMilliFPS = mfps[0]; + device.maxMilliFPS = mfps[1]; + } + allDevices.add(device); + } + return allDevices.toArray(new CaptureCapabilityAndroid[0]); + } } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc index 2de9b441e121..2b1486faa9de 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc @@ -8,495 +8,194 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include "AndroidJNIWrapper.h" #include "webrtc/modules/video_capture/android/video_capture_android.h" -#include - +#include "webrtc/modules/utility/interface/helpers_android.h" +#include "webrtc/modules/video_capture/android/device_info_android.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/system_wrappers/interface/logcat_trace_context.h" #include "webrtc/system_wrappers/interface/ref_count.h" #include "webrtc/system_wrappers/interface/trace.h" -#include "AndroidJNIWrapper.h" -#include "mozilla/Assertions.h" +static JavaVM* g_jvm = NULL; +static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class. -namespace webrtc -{ -#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) -// TODO(leozwang) These SetAndroidVM apis will be refactored, thus we only -// keep and reference java vm. -int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext) { - return videocapturemodule::VideoCaptureAndroid::SetAndroidObjects( - javaVM, - javaContext); +namespace webrtc { + +// Called by Java when the camera has a new frame to deliver. +void JNICALL ProvideCameraFrame( + JNIEnv* env, + jobject, + jbyteArray javaCameraFrame, + jint length, + jlong context, + jint rotation_deg) { + webrtc::videocapturemodule::VideoCaptureAndroid* captureModule = + reinterpret_cast( + context); + VideoCaptureRotation rotation; + if (!videocapturemodule::VideoCaptureImpl::RotationFromDegrees( + static_cast(rotation_deg), &rotation)) { + captureModule->SetCaptureRotation(rotation); + } + jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL); + captureModule->OnIncomingFrame( + reinterpret_cast(cameraFrame), length, 0); + env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT); } -#endif -namespace videocapturemodule -{ +int32_t SetCaptureAndroidVM(JavaVM* javaVM) { + if (g_java_capturer_class) + return 0; + + g_jvm = javaVM; + AttachThreadScoped ats(g_jvm); + + videocapturemodule::DeviceInfoAndroid::Initialize(ats.env()); + + g_java_capturer_class = + jsjni_GetGlobalClassRef("org/webrtc/videoengine/VideoCaptureAndroid"); + assert(g_java_capturer_class); + + JNINativeMethod native_method = { + "ProvideCameraFrame", "([BIJI)V", + reinterpret_cast(&ProvideCameraFrame) + }; + if (ats.env()->RegisterNatives(g_java_capturer_class, &native_method, 1) != 0) + assert(false); + + return 0; +} + +namespace videocapturemodule { VideoCaptureModule* VideoCaptureImpl::Create( const int32_t id, const char* deviceUniqueIdUTF8) { - RefCountImpl* implementation = new RefCountImpl(id); - - if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0) { + if (implementation->Init(id, deviceUniqueIdUTF8) != 0) { delete implementation; implementation = NULL; } return implementation; } -#ifdef DEBUG -// Android logging, uncomment to print trace to -// logcat instead of trace file/callback -#include -// #undef WEBRTC_TRACE -// #define WEBRTC_TRACE(a,b,c,...) -// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) -// Some functions are called before before the WebRTC logging can be brought up, -// log those to the Android log. -#define EARLY_WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC-VCA", __VA_ARGS__) -#else -#define EARLY_WEBRTC_TRACE(a,b,c,...) 
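
SetCaptureAndroidVM() above registers ProvideCameraFrame explicitly through RegisterNatives() rather than relying on name-based lookup, so the "([BIJI)V" descriptor must match the Java declaration field for field; note, too, that JNI looks up constructors, as Init() does later in this file, under the special method name "<init>". For reference, a hedged Java-side sketch of the declaration the descriptor encodes (the class body is abbreviated):

```java
// Java-side declaration that "([BIJI)V" in RegisterNatives must match:
//   [B -> byte[] data         (the NV21 frame)
//   I  -> int length
//   J  -> long captureObject  (C++ VideoCaptureAndroid* smuggled as jlong)
//   I  -> int rotation        (degrees)
//   V  -> void return
public class VideoCaptureAndroidSketch {
    private native void ProvideCameraFrame(
        byte[] data, int length, long captureObject, int rotation);
}
```
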
-#endif - -JavaVM* VideoCaptureAndroid::g_jvm = NULL; -//VideoCaptureAndroid.java -jclass VideoCaptureAndroid::g_javaCmClass = NULL; -//VideoCaptureDeviceInfoAndroid.java -jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; -//static instance of VideoCaptureDeviceInfoAndroid.java -jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; - -/* - * Register references to Java Capture class. - */ -int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, - void* javaContext) { - - MOZ_ASSERT(javaVM != nullptr || g_javaCmDevInfoClass != nullptr); - EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: running", __FUNCTION__); - - g_jvm = static_cast (javaVM); - - if (javaVM) { - // Already done? Exit early. - if (g_javaCmClass != NULL - && g_javaCmDevInfoClass != NULL - && g_javaCmDevInfoObject != NULL) { - EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: early exit", __FUNCTION__); - return 0; - } - - JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get Java environment", __FUNCTION__); - return -1; - } - // get java capture class type (note path to class packet) - g_javaCmClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureClass); - if (!g_javaCmClass) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not find java class", __FUNCTION__); - return -1; - } - JNINativeMethod nativeFunctions = - { "ProvideCameraFrame", "([BIIJ)V", - (void*) &VideoCaptureAndroid::ProvideCameraFrame }; - if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) { - EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Registered native functions", __FUNCTION__); - } - else { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to register native functions", - __FUNCTION__); - return -1; - } - - // get java capture class type (note path to class packet) - g_javaCmDevInfoClass = jsjni_GetGlobalClassRef( - AndroidJavaCaptureDeviceInfoClass); - if (!g_javaCmDevInfoClass) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not find java class", __FUNCTION__); - return -1; - } - - EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "VideoCaptureDeviceInfoAndroid get method id"); - - // get the method ID for the Android Java CaptureClass static - //CreateVideoCaptureAndroid factory method. 
- jmethodID cid = env->GetStaticMethodID( - g_javaCmDevInfoClass, - "CreateVideoCaptureDeviceInfoAndroid", - "(ILandroid/content/Context;)" - "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;"); - if (cid == NULL) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get java" - "VideoCaptureDeviceInfoAndroid constructor ID", - __FUNCTION__); - return -1; - } - - EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: construct static java device object", __FUNCTION__); - - // construct the object by calling the static constructor object - jobject javaCameraDeviceInfoObjLocal = - env->CallStaticObjectMethod(g_javaCmDevInfoClass, - cid, (int) -1, - javaContext); - bool exceptionThrown = env->ExceptionCheck(); - if (!javaCameraDeviceInfoObjLocal || exceptionThrown) { - if (exceptionThrown) { - env->ExceptionDescribe(); - env->ExceptionClear(); - } - EARLY_WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, - "%s: could not create Java Capture Device info object", - __FUNCTION__); - return -1; - } - // create a reference to the object (to tell JNI that - // we are referencing it after this function has returned) - g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal); - if (!g_javaCmDevInfoObject) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, - webrtc::kTraceAudioDevice, - -1, - "%s: could not create Java" - "cameradevinceinfo object reference", - __FUNCTION__); - return -1; - } - // Delete local object ref, we only use the global ref - env->DeleteLocalRef(javaCameraDeviceInfoObjLocal); - - EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: success", __FUNCTION__); - return 0; - } - else { - EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: JVM is NULL, assuming deinit", __FUNCTION__); - if (!g_jvm) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: SetAndroidObjects not called with a valid JVM.", - __FUNCTION__); - return -1; - } - JNIEnv* env = NULL; - bool attached = false; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - -1, "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - attached = true; - } - env->DeleteGlobalRef(g_javaCmDevInfoObject); - env->DeleteGlobalRef(g_javaCmDevInfoClass); - env->DeleteGlobalRef(g_javaCmClass); - if (attached && g_jvm->DetachCurrentThread() < 0) { - EARLY_WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, - "%s: Could not detach thread from JVM", __FUNCTION__); - return -1; - } - return 0; - env = (JNIEnv *) NULL; - } - return 0; +int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame, + int32_t videoFrameLength, + int64_t captureTime) { + return IncomingFrame( + videoFrame, videoFrameLength, _captureCapability, captureTime); } -/* - * JNI callback from Java class. 
Called - * when the camera has a new frame to deliver - * Class: org_webrtc_capturemodule_VideoCaptureAndroid - * Method: ProvideCameraFrame - * Signature: ([BIJ)V - */ -void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env, - jobject, - jbyteArray javaCameraFrame, - jint length, - jint rotation, - jlong context) { - VideoCaptureAndroid* captureModule = - reinterpret_cast(context); - WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, - -1, "%s: IncomingFrame %d", __FUNCTION__,length); - - switch (rotation) { - case 90: - captureModule->SetCaptureRotation(kCameraRotate90); - break; - case 180: - captureModule->SetCaptureRotation(kCameraRotate180); - break; - case 270: - captureModule->SetCaptureRotation(kCameraRotate270); - break; - case 0: - default: - captureModule->SetCaptureRotation(kCameraRotate0); - break; - } - - jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL); - captureModule->IncomingFrame((uint8_t*) cameraFrame, - length,captureModule->_frameInfo,0); - env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT); -} - - - VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id) - : VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL), + : VideoCaptureImpl(id), + _deviceInfo(id), + _jCapturer(NULL), _captureStarted(false) { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: context %x", __FUNCTION__, (int) this); } -// ---------------------------------------------------------------------------- -// Init -// -// Initializes needed Java resources like the JNI interface to -// VideoCaptureAndroid.java -// ---------------------------------------------------------------------------- int32_t VideoCaptureAndroid::Init(const int32_t id, - const char* deviceUniqueIdUTF8) { + const char* deviceUniqueIdUTF8) { const int nameLength = strlen(deviceUniqueIdUTF8); - if (nameLength >= kVideoCaptureUniqueNameLength) { + if (nameLength >= kVideoCaptureUniqueNameLength) return -1; - } // Store the device name _deviceUniqueId = new char[nameLength + 1]; memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1); - if (_capInfo.Init() != 0) { - WEBRTC_TRACE(webrtc::kTraceError, - webrtc::kTraceVideoCapture, - _id, - "%s: Failed to initialize CaptureDeviceInfo", - __FUNCTION__); + AttachThreadScoped ats(g_jvm); + JNIEnv* env = ats.env(); + + jmethodID ctor = env->GetMethodID(g_java_capturer_class, "", "(IJ)V"); + assert(ctor); + jlong j_this = reinterpret_cast(this); + size_t camera_id = 0; + if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id)) return -1; - } - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:", - __FUNCTION__); - // use the jvm that has been set - if (!g_jvm) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Not a valid Java VM pointer", __FUNCTION__); - return -1; - } - - AutoLocalJNIFrame jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return -1; - - jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); - jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "get method id"); - // get the method ID for the Android Java - // CaptureDeviceInfoClass AllocateCamera factory method. 
- char signature[256]; - sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass); - - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "AllocateCamera", - signature); - if (cid == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: could not get constructor ID", __FUNCTION__); - return -1; /* exception thrown */ - } - - jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); - // construct the object by calling the static constructor object - jobject javaCameraObjLocal = env->CallObjectMethod(javaCmDevInfoObject, - cid, (jint) id, - (jlong) this, - capureIdString); - if (!javaCameraObjLocal || jniFrame.CheckForException()) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, - "%s: could not create Java Capture object", __FUNCTION__); - return -1; - } - - // create a reference to the object (to tell JNI that we are referencing it - // after this function has returned) - _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal); - if (!_javaCaptureObj) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id, - "%s: could not create Java camera object reference", - __FUNCTION__); - return -1; - } - + _jCapturer = env->NewGlobalRef( + env->NewObject(g_java_capturer_class, ctor, camera_id, j_this)); + assert(_jCapturer); return 0; } VideoCaptureAndroid::~VideoCaptureAndroid() { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:", - __FUNCTION__); - if (_javaCaptureObj == NULL || g_jvm == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Nothing to clean", __FUNCTION__); - } - else { - AutoLocalJNIFrame jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return; - - // get the method ID for the Android Java CaptureClass static - // DeleteVideoCaptureAndroid method. Call this to release the camera so - // another application can use it. - jmethodID cid = env->GetStaticMethodID(g_javaCmClass, - "DeleteVideoCaptureAndroid", - "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); - if (cid != NULL) { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__); - // Close the camera by calling the static destruct function. - env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj); - jniFrame.CheckForException(); - - // Delete global object ref to the camera. - env->DeleteGlobalRef(_javaCaptureObj); - _javaCaptureObj = NULL; - } else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find DeleteVideoCaptureAndroid id", - __FUNCTION__); - } - } + // Ensure Java camera is released even if our caller didn't explicitly Stop. + if (_captureStarted) + StopCapture(); + AttachThreadScoped ats(g_jvm); + ats.env()->DeleteGlobalRef(_jCapturer); } int32_t VideoCaptureAndroid::StartCapture( const VideoCaptureCapability& capability) { CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); + AttachThreadScoped ats(g_jvm); + JNIEnv* env = ats.env(); - int32_t result = 0; - - AutoLocalJNIFrame jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return -1; - - if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability, - _frameInfo) < 0) { + if (_deviceInfo.GetBestMatchedCapability( + _deviceUniqueId, capability, _captureCapability) < 0) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: GetBestMatchedCapability failed. 
Req cap w%d h%d", + "%s: GetBestMatchedCapability failed: %dx%d", __FUNCTION__, capability.width, capability.height); return -1; } - // Store the new expected capture delay - _captureDelay = _frameInfo.expectedCaptureDelay; + _captureDelay = _captureCapability.expectedCaptureDelay; - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width, - _frameInfo.height); - - // get the method ID for the Android Java - // CaptureClass static StartCapture method. - jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I"); - if (cid != NULL) { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Call StartCapture", __FUNCTION__); - // Close the camera by calling the static destruct function. - result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width, - _frameInfo.height, _frameInfo.maxFPS); - } - else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find StartCapture id", __FUNCTION__); - } - - if (result == 0) { + jmethodID j_start = + env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z"); + assert(j_start); + int min_mfps = 0; + int max_mfps = 0; + _deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps); + bool started = env->CallBooleanMethod(_jCapturer, j_start, + _captureCapability.width, + _captureCapability.height, + min_mfps, max_mfps); + if (started) { _requestedCapability = capability; _captureStarted = true; } - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: result %d", __FUNCTION__, result); - return result; + return started ? 0 : -1; } int32_t VideoCaptureAndroid::StopCapture() { CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); - - int32_t result = 0; - - AutoLocalJNIFrame jniFrame; - JNIEnv* env = jniFrame.GetEnv(); - if (!env) - return -1; + AttachThreadScoped ats(g_jvm); + JNIEnv* env = ats.env(); memset(&_requestedCapability, 0, sizeof(_requestedCapability)); - memset(&_frameInfo, 0, sizeof(_frameInfo)); - - // get the method ID for the Android Java CaptureClass StopCapture method. - jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I"); - if (cid != NULL) { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Call StopCapture", __FUNCTION__); - // Close the camera by calling the static destruct function. - result = env->CallIntMethod(_javaCaptureObj, cid); - } - else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find StopCapture id", __FUNCTION__); - } - + memset(&_captureCapability, 0, sizeof(_captureCapability)); _captureStarted = false; - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: result %d", __FUNCTION__, result); - return result; + jmethodID j_stop = + env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z"); + return env->CallBooleanMethod(_jCapturer, j_stop) ? 
0 : -1; } bool VideoCaptureAndroid::CaptureStarted() { CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); return _captureStarted; } int32_t VideoCaptureAndroid::CaptureSettings( VideoCaptureCapability& settings) { CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); settings = _requestedCapability; return 0; } int32_t VideoCaptureAndroid::SetCaptureRotation( VideoCaptureRotation rotation) { - CriticalSectionScoped cs(&_apiCs); - return VideoCaptureImpl::SetCaptureRotation(rotation); + // Our only caller is ProvideCameraFrame, which is called + // from a synchronized Java method. If we'd take this lock, + // any call going from C++ to Java will deadlock. + // CriticalSectionScoped cs(&_apiCs); + VideoCaptureImpl::SetCaptureRotation(rotation); + return 0; } } // namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h index 09997c076a37..3ab7189b42ef 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h @@ -12,164 +12,35 @@ #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_ #include -#include -#include "trace.h" #include "webrtc/modules/video_capture/android/device_info_android.h" #include "webrtc/modules/video_capture/video_capture_impl.h" -#define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid" - namespace webrtc { namespace videocapturemodule { class VideoCaptureAndroid : public VideoCaptureImpl { public: - static int32_t SetAndroidObjects(void* javaVM, void* javaContext); VideoCaptureAndroid(const int32_t id); virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8); - virtual int32_t StartCapture( - const VideoCaptureCapability& capability); + virtual int32_t StartCapture(const VideoCaptureCapability& capability); virtual int32_t StopCapture(); virtual bool CaptureStarted(); virtual int32_t CaptureSettings(VideoCaptureCapability& settings); virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation); - friend class AutoLocalJNIFrame; + int32_t OnIncomingFrame(uint8_t* videoFrame, + int32_t videoFrameLength, + int64_t captureTime = 0); protected: virtual ~VideoCaptureAndroid(); - static void JNICALL ProvideCameraFrame (JNIEnv * env, - jobject, - jbyteArray javaCameraFrame, - jint length, - jint rotation, - jlong context); - DeviceInfoAndroid _capInfo; - jobject _javaCaptureObj; // Java Camera object. - VideoCaptureCapability _frameInfo; + + DeviceInfoAndroid _deviceInfo; + jobject _jCapturer; // Global ref to Java VideoCaptureAndroid object. + VideoCaptureCapability _captureCapability; bool _captureStarted; - - static JavaVM* g_jvm; - static jclass g_javaCmClass; - static jclass g_javaCmDevInfoClass; - //Static java object implementing the needed device info functions; - static jobject g_javaCmDevInfoObject; -}; - -// Reworked version of what is available in AndroidBridge, -// can attach/deatch in addition to push/pop frames. 
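
The SetCaptureRotation() comment above describes a classic lock-ordering inversion: the camera thread enters a synchronized Java method and then calls down into C++, while StartCapture()/StopCapture() take _apiCs first and then call up into synchronized Java methods. A schematic Java rendering of the two paths (the lock objects here are stand-ins, not real classes from the patch):

```java
// Sketch of the lock-ordering hazard the comment above is avoiding.
// Thread A (Java camera thread): synchronized(this) -> native -> C++ _apiCs
// Thread B (C++ API thread):     C++ _apiCs -> JNI -> synchronized(this)
// Run concurrently, each holds one lock and waits forever for the other.
public class LockOrderingDemo {
    // Stands in for the synchronized VideoCaptureAndroid methods.
    private final Object javaMonitor = new Object();
    // Stands in for the C++ _apiCs critical section.
    private final Object nativeLock = new Object();

    void cameraThreadPath() {
        synchronized (javaMonitor) {     // onPreviewFrame is synchronized
            synchronized (nativeLock) {  // would be taken by SetCaptureRotation
                // deliver frame
            }
        }
    }

    void apiThreadPath() {
        synchronized (nativeLock) {      // StartCapture/StopCapture take _apiCs
            synchronized (javaMonitor) { // then call synchronized Java methods
                // start or stop capture
            }
        }
    }
}
```
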
-class AutoLocalJNIFrame { -public: - AutoLocalJNIFrame(int nEntries = 128) - : mEntries(nEntries), mHasFrameBeenPushed(false), mAttached(false) - { - mJNIEnv = InitJNIEnv(); - Push(); - } - - JNIEnv* GetEnv() { - return mJNIEnv; - } - - jclass GetCmDevInfoClass() { - assert(VideoCaptureAndroid::g_javaCmDevInfoClass != nullptr); - return VideoCaptureAndroid::g_javaCmDevInfoClass; - } - - jobject GetCmDevInfoObject() { - assert(VideoCaptureAndroid::g_javaCmDevInfoObject != nullptr); - return VideoCaptureAndroid::g_javaCmDevInfoObject; - } - - bool CheckForException() { - if (mJNIEnv->ExceptionCheck()) { - mJNIEnv->ExceptionDescribe(); - mJNIEnv->ExceptionClear(); - return true; - } - - return false; - } - - ~AutoLocalJNIFrame() { - if (!mJNIEnv) - return; - - CheckForException(); - - if (mHasFrameBeenPushed) - mJNIEnv->PopLocalFrame(NULL); - - if (mAttached) { - int res = VideoCaptureAndroid::g_jvm->DetachCurrentThread(); - if (res < 0) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: JVM Detach failed.", __FUNCTION__); - } - } - } - -private: - void Push() { - if (!mJNIEnv) - return; - - // Make sure there is enough space to store a local ref to the - // exception. I am not completely sure this is needed, but does - // not hurt. - jint ret = mJNIEnv->PushLocalFrame(mEntries + 1); - assert(ret == 0); - if (ret < 0) - CheckForException(); - else - mHasFrameBeenPushed = true; - } - - JNIEnv* InitJNIEnv() - { - JNIEnv* env = nullptr; - - // Get the JNI env for this thread. - if (!VideoCaptureAndroid::g_jvm) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: SetAndroidObjects not called with a valid JVM.", - __FUNCTION__); - return nullptr; - } - - jint res = VideoCaptureAndroid::g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4); - if (res == JNI_EDETACHED) { - // Try to attach this thread to the JVM and get the env. - res = VideoCaptureAndroid::g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - // Attaching failed, error out. - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return nullptr; - } - mAttached = true; - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: attach success", __FUNCTION__); - } else if (res == JNI_OK) { - // Already attached, GetEnv succeeded. - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: did not attach because JVM Env already present", - __FUNCTION__); - } else { - // Non-recoverable error in GetEnv. 
- return nullptr; - } - - return env; - } - - int mEntries; - JNIEnv* mJNIEnv; - bool mHasFrameBeenPushed; - bool mAttached; }; } // namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc index edf1d1ad5334..434bcb5321a6 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc @@ -31,13 +31,6 @@ DeviceInfoImpl::DeviceInfoImpl(const int32_t id) DeviceInfoImpl::~DeviceInfoImpl(void) { _apiLock.AcquireLockExclusive(); - - for (VideoCaptureCapabilityMap::iterator it = _captureCapabilities.begin(); - it != _captureCapabilities.end(); - ++it) { - delete it->second; - } - free(_lastUsedDeviceName); _apiLock.ReleaseLockExclusive(); @@ -124,23 +117,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8, return -1; } - VideoCaptureCapabilityMap::iterator item = - _captureCapabilities.find(deviceCapabilityNumber); - - if (item == _captureCapabilities.end()) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "Failed to find capability number %d of %d possible", - deviceCapabilityNumber, _captureCapabilities.size()); - return -1; - } - - if (item->second == NULL) - { - return -1; - } - - capability = *item->second; + capability = _captureCapabilities[deviceCapabilityNumber]; return 0; } @@ -188,11 +165,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( for (int32_t tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities { - VideoCaptureCapabilityMap::iterator item = _captureCapabilities.find(tmp); - if (item == _captureCapabilities.end()) - return -1; - - VideoCaptureCapability& capability = *item->second; + VideoCaptureCapability& capability = _captureCapabilities[tmp]; const int32_t diffWidth = capability.width - requested.width; const int32_t diffHeight = capability.height - requested.height; @@ -298,15 +271,9 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( bestWidth, bestHeight, bestFrameRate, bestRawType); // Copy the capability - VideoCaptureCapabilityMap::iterator item = - _captureCapabilities.find(bestformatIndex); - if (item == _captureCapabilities.end()) + if (bestformatIndex < 0) return -1; - if (item->second == NULL) - return -1; - - resulting = *item->second; - + resulting = _captureCapabilities[bestformatIndex]; return bestformatIndex; } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.h b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.h index dc4c08c68742..f0e66f44dec2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.h @@ -11,7 +11,7 @@ #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ -#include +#include #include "webrtc/modules/video_capture/include/video_capture.h" #include "webrtc/modules/video_capture/video_capture_delay.h" @@ -58,8 +58,8 @@ protected: protected: // Data members int32_t _id; - typedef std::map VideoCaptureCapabilityMap; - VideoCaptureCapabilityMap _captureCapabilities; + typedef std::vector VideoCaptureCapabilities; + VideoCaptureCapabilities _captureCapabilities; RWLockWrapper& _apiLock; char* _lastUsedDeviceName; uint32_t _lastUsedDeviceNameLength; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture.h 
b/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture.h index e3d674b1e468..b2309778bae8 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture.h @@ -14,10 +14,14 @@ #include "webrtc/modules/interface/module.h" #include "webrtc/modules/video_capture/include/video_capture_defines.h" +#if defined(ANDROID) && !defined(WEBRTC_GONK) +#include +#endif + namespace webrtc { -#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) -int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext); +#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_GONK) +int32_t SetCaptureAndroidVM(JavaVM* javaVM); #endif class VideoCaptureModule: public RefCountedModule { @@ -101,18 +105,17 @@ class VideoCaptureModule: public RefCountedModule { }; // Register capture data callback - virtual int32_t RegisterCaptureDataCallback( + virtual void RegisterCaptureDataCallback( VideoCaptureDataCallback& dataCallback) = 0; // Remove capture data callback - virtual int32_t DeRegisterCaptureDataCallback() = 0; + virtual void DeRegisterCaptureDataCallback() = 0; // Register capture callback. - virtual int32_t RegisterCaptureCallback( - VideoCaptureFeedBack& callBack) = 0; + virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack) = 0; // Remove capture callback. - virtual int32_t DeRegisterCaptureCallback() = 0; + virtual void DeRegisterCaptureCallback() = 0; // Start capture device virtual int32_t StartCapture( @@ -129,7 +132,7 @@ class VideoCaptureModule: public RefCountedModule { // Gets the current configuration. virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0; - virtual int32_t SetCaptureDelay(int32_t delayMS) = 0; + virtual void SetCaptureDelay(int32_t delayMS) = 0; // Returns the current CaptureDelay. Only valid when the camera is running. virtual int32_t CaptureDelay() = 0; @@ -145,8 +148,8 @@ class VideoCaptureModule: public RefCountedModule { virtual VideoCaptureEncodeInterface* GetEncodeInterface( const VideoCodec& codec) = 0; - virtual int32_t EnableFrameRateCallback(const bool enable) = 0; - virtual int32_t EnableNoPictureAlarm(const bool enable) = 0; + virtual void EnableFrameRateCallback(const bool enable) = 0; + virtual void EnableNoPictureAlarm(const bool enable) = 0; protected: virtual ~VideoCaptureModule() {}; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture_defines.h b/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture_defines.h index c592633b6418..330bfc75dafa 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture_defines.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/include/video_capture_defines.h @@ -84,33 +84,6 @@ enum VideoCaptureAlarm Cleared = 1 }; -// VideoFrameI420 doesn't take the ownership of the buffer. -// It's mostly used to group the parameters for external capture. -struct VideoFrameI420 -{ - VideoFrameI420() { - y_plane = NULL; - u_plane = NULL; - v_plane = NULL; - y_pitch = 0; - u_pitch = 0; - v_pitch = 0; - width = 0; - height = 0; - } - - unsigned char* y_plane; - unsigned char* u_plane; - unsigned char* v_plane; - - int y_pitch; - int u_pitch; - int v_pitch; - - unsigned short width; - unsigned short height; -}; - /* External Capture interface. Returned by Create and implemented by the capture module. 
*/ @@ -122,8 +95,9 @@public: int32_t videoFrameLength, const VideoCaptureCapability& frameInfo, int64_t captureTime = 0) = 0; - virtual int32_t IncomingFrameI420(const VideoFrameI420& video_frame, - int64_t captureTime = 0) = 0; + virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame, + int64_t captureTime = 0) = 0; + protected: ~VideoCaptureExternal() {} }; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc index b060fa67a7c8..2c4fb09fa319 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc @@ -236,16 +236,10 @@ } // now fd will point to the matching device - // reset old capability map - for (std::map<int, VideoCaptureCapability*>::iterator it = - _captureCapabilities.begin(); - it != _captureCapabilities.end(); - ++it) { - delete it->second; - } + // reset old capability list. _captureCapabilities.clear(); - int size = FillCapabilityMap(fd); + int size = FillCapabilities(fd); close(fd); // Store the new used device name @@ -271,7 +265,7 @@ bool DeviceInfoLinux::IsDeviceNameMatches(const char* name, return false; } -int32_t DeviceInfoLinux::FillCapabilityMap(int fd) +int32_t DeviceInfoLinux::FillCapabilities(int fd) { // set image format @@ -308,39 +302,39 @@ if ((video_fmt.fmt.pix.width == size[i][0]) && (video_fmt.fmt.pix.height == size[i][1])) { - VideoCaptureCapability *cap = new VideoCaptureCapability(); - cap->width = video_fmt.fmt.pix.width; - cap->height = video_fmt.fmt.pix.height; - cap->expectedCaptureDelay = 120; + VideoCaptureCapability cap; + cap.width = video_fmt.fmt.pix.width; + cap.height = video_fmt.fmt.pix.height; + cap.expectedCaptureDelay = 120; if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) { - cap->rawType = kVideoYUY2; + cap.rawType = kVideoYUY2; } else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) { - cap->rawType = kVideoI420; + cap.rawType = kVideoI420; } else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG) { - cap->rawType = kVideoMJPEG; + cap.rawType = kVideoMJPEG; } // get fps of current camera mode // V4l2 does not have a stable method of knowing so we just guess.
- if(cap->width >= 800 && cap->rawType != kVideoMJPEG) + if(cap.width >= 800 && cap.rawType != kVideoMJPEG) { - cap->maxFPS = 15; + cap.maxFPS = 15; } else { - cap->maxFPS = 30; + cap.maxFPS = 30; } - _captureCapabilities[index] = cap; + _captureCapabilities.push_back(cap); index++; WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "Camera capability, width:%d height:%d type:%d fps:%d", - cap->width, cap->height, cap->rawType, cap->maxFPS); + cap.width, cap.height, cap.rawType, cap.maxFPS); } } } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.h b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.h index 3309f26fdae7..cffb22256ce2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.h @@ -42,7 +42,7 @@ public: void* /*parentWindow*/, uint32_t /*positionX*/, uint32_t /*positionY*/) { return -1;} - int32_t FillCapabilityMap(int fd); + int32_t FillCapabilities(int fd); int32_t Init(); private: diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm b/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm index 4babbbaf5fa2..aeb8de6ae4e9 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm @@ -27,8 +27,8 @@ #include #include #else -#include "qtkit/video_capture_qtkit.h" -#include "qtkit/video_capture_qtkit_info.h" +#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.h" +#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h" #endif namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/test/video_capture_unittest.cc b/media/webrtc/trunk/webrtc/modules/video_capture/test/video_capture_unittest.cc index b047bee4ff32..98b6aa61a06b 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/test/video_capture_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/test/video_capture_unittest.cc @@ -84,60 +84,6 @@ static bool CompareFrames(const webrtc::I420VideoFrame& frame1, return true; } -// Compares the content of a I420 frame in planar form and the new video frame. 
-static bool CompareFrames(const webrtc::VideoFrameI420& frame1, - const webrtc::I420VideoFrame& frame2) { - if (frame1.width != frame2.width() || - frame1.height != frame2.height()) { - return false; - } - - // Compare Y - const unsigned char* y_plane = frame1.y_plane; - const unsigned char* y_plane2 = frame2.buffer(webrtc::kYPlane); - for (int i = 0; i < frame2.height(); ++i) { - for (int j = 0; j < frame2.width(); ++j) { - if (*y_plane != *y_plane2) - return false; - ++y_plane; - ++y_plane2; - } - y_plane += frame1.y_pitch - frame1.width; - y_plane2 += frame2.stride(webrtc::kYPlane) - frame2.width(); - } - - // Compare U - const unsigned char* u_plane = frame1.u_plane; - const unsigned char* u_plane2 = frame2.buffer(webrtc::kUPlane); - for (int i = 0; i < (frame2.height() + 1) / 2; ++i) { - for (int j = 0; j < (frame2.width() + 1) / 2; ++j) { - if (*u_plane != *u_plane2) - return false; - ++u_plane; - ++u_plane2; - } - u_plane += frame1.u_pitch - (frame1.width + 1) / 2; - u_plane2+= frame2.stride(webrtc::kUPlane) - (frame2.width() + 1) / 2; - } - - // Compare V - unsigned char* v_plane = frame1.v_plane; - const unsigned char* v_plane2 = frame2.buffer(webrtc::kVPlane); - for (int i = 0; i < frame2.height() /2; ++i) { - for (int j = 0; j < frame2.width() /2; ++j) { - if (*u_plane != *u_plane2) { - return false; - } - ++v_plane; - ++v_plane2; - } - v_plane += frame1.v_pitch - (frame1.width + 1) / 2; - u_plane2+= frame2.stride(webrtc::kVPlane) - (frame2.width() + 1) / 2; - } - return true; -} - - class TestVideoCaptureCallback : public VideoCaptureDataCallback { public: TestVideoCaptureCallback() @@ -229,11 +175,6 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { return CompareFrames(last_frame_, frame); } - bool CompareLastFrame(const webrtc::VideoFrameI420& frame) { - CriticalSectionScoped cs(capture_cs_.get()); - return CompareFrames(frame, last_frame_); - } - void SetExpectedCaptureRotation(webrtc::VideoCaptureRotation rotation) { CriticalSectionScoped cs(capture_cs_.get()); rotate_frame_ = rotation; @@ -311,7 +252,7 @@ class VideoCaptureTest : public testing::Test { EXPECT_FALSE(module->CaptureStarted()); - EXPECT_EQ(0, module->RegisterCaptureDataCallback(*callback)); + module->RegisterCaptureDataCallback(*callback); return module; } @@ -467,11 +408,10 @@ class VideoCaptureExternalTest : public testing::Test { memset(test_frame_.buffer(webrtc::kVPlane), 127, ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2)); - EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback( - capture_callback_)); - EXPECT_EQ(0, capture_module_->RegisterCaptureCallback(capture_feedback_)); - EXPECT_EQ(0, capture_module_->EnableFrameRateCallback(true)); - EXPECT_EQ(0, capture_module_->EnableNoPictureAlarm(true)); + capture_module_->RegisterCaptureDataCallback(capture_callback_); + capture_module_->RegisterCaptureCallback(capture_feedback_); + capture_module_->EnableFrameRateCallback(true); + capture_module_->EnableNoPictureAlarm(true); } void TearDown() { @@ -503,16 +443,11 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) { // NOTE: flaky, sometimes fails on the last CompareLastFrame. 
// http://code.google.com/p/webrtc/issues/detail?id=777 TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { - webrtc::VideoFrameI420 frame_i420; - frame_i420.width = kTestWidth; - frame_i420.height = kTestHeight; - frame_i420.y_plane = test_frame_.buffer(webrtc::kYPlane); - frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight); - frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2); - frame_i420.y_pitch = kTestWidth; - frame_i420.u_pitch = kTestWidth / 2; - frame_i420.v_pitch = kTestWidth / 2; - EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0)); + webrtc::I420VideoFrame frame_i420; + frame_i420.CopyFrame(test_frame_); + + EXPECT_EQ(0, + capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0)); EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420)); // Test with a frame with pitch not equal to width @@ -566,16 +501,10 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { current_pointer += v_pitch; v_plane += uv_width; } - frame_i420.width = kTestWidth; - frame_i420.height = kTestHeight; - frame_i420.y_plane = aligned_test_frame.buffer(webrtc::kYPlane); - frame_i420.u_plane = aligned_test_frame.buffer(webrtc::kYPlane); - frame_i420.v_plane = aligned_test_frame.buffer(webrtc::kVPlane); - frame_i420.y_pitch = y_pitch; - frame_i420.u_pitch = u_pitch; - frame_i420.v_pitch = v_pitch; + frame_i420.CopyFrame(aligned_test_frame); - EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0)); + EXPECT_EQ(0, + capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0)); EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_)); } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi index a8e1ce3e73c0..636a982fa2bd 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi @@ -16,16 +16,9 @@ '<(webrtc_root)/common_video/common_video.gyp:common_video', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'cflags_mozilla': [ '$(NSPR_CFLAGS)', ], - - 'include_dirs': [ - 'include', - '../interface', - '<(webrtc_root)/common_video/libyuv/include', - ], 'sources': [ 'device_info_impl.cc', 'device_info_impl.h', @@ -47,9 +40,6 @@ }, { # include_internal_video_capture == 1 'conditions': [ ['include_v4l2_video_capture==1', { - 'include_dirs': [ - 'linux', - ], 'sources': [ 'linux/device_info_linux.cc', 'linux/device_info_linux.h', @@ -70,9 +60,6 @@ 'mac/qtkit/video_capture_qtkit_utility.h', 'mac/video_capture_mac.mm', ], - 'include_dirs': [ - 'mac', - ], 'link_settings': { 'xcode_settings': { 'OTHER_LDFLAGS': [ @@ -89,9 +76,6 @@ ], }], ], - 'include_dirs': [ - 'windows', - ], 'sources': [ 'windows/device_info_ds.cc', 'windows/device_info_ds.h', @@ -118,9 +102,6 @@ }, }], # win ['OS=="android"', { - 'include_dirs': [ - 'android', - ], 'sources': [ 'android/device_info_android.cc', 'android/device_info_android.h', @@ -167,9 +148,6 @@ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(DEPTH)/testing/gtest.gyp:gtest', ], - 'include_dirs': [ - 'include', - ], 'sources': [ 'test/video_capture_unittest.cc', 'test/video_capture_main_mac.mm', @@ -222,10 +200,10 @@ 'target_name': 'video_capture_tests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'video_capture_tests', ], 'includes': [ + 
'../../build/isolate.gypi', 'video_capture_tests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc index 56d6d18f3324..e83c7a469cb2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc @@ -41,6 +41,47 @@ const char* VideoCaptureImpl::CurrentDeviceName() const return _deviceUniqueId; } +// static +int32_t VideoCaptureImpl::RotationFromDegrees(int degrees, + VideoCaptureRotation* rotation) { + switch (degrees) { + case 0: + *rotation = kCameraRotate0; + return 0; + case 90: + *rotation = kCameraRotate90; + return 0; + case 180: + *rotation = kCameraRotate180; + return 0; + case 270: + *rotation = kCameraRotate270; + return 0; + default: + return -1; + } +} + +// static +int32_t VideoCaptureImpl::RotationInDegrees(VideoCaptureRotation rotation, + int* degrees) { + switch (rotation) { + case kCameraRotate0: + *degrees = 0; + return 0; + case kCameraRotate90: + *degrees = 90; + return 0; + case kCameraRotate180: + *degrees = 180; + return 0; + case kCameraRotate270: + *degrees = 270; + return 0; + } + return -1; +} + int32_t VideoCaptureImpl::ChangeUniqueId(const int32_t id) { _id = id; @@ -123,7 +164,7 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id) _captureCallBack(NULL), _lastProcessFrameCount(TickTime::Now()), _rotateFrame(kRotateNone), - last_capture_time_(TickTime::MillisecondTimestamp()), + last_capture_time_(0), delta_ntp_internal_ms_( Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() - TickTime::MillisecondTimestamp()) { @@ -146,45 +187,33 @@ VideoCaptureImpl::~VideoCaptureImpl() delete[] _deviceUniqueId; } -int32_t VideoCaptureImpl::RegisterCaptureDataCallback( - VideoCaptureDataCallback& dataCallBack) -{ +void VideoCaptureImpl::RegisterCaptureDataCallback( + VideoCaptureDataCallback& dataCallBack) { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); _dataCallBack = &dataCallBack; - - return 0; } -int32_t VideoCaptureImpl::DeRegisterCaptureDataCallback() -{ +void VideoCaptureImpl::DeRegisterCaptureDataCallback() { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); _dataCallBack = NULL; - return 0; } -int32_t VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack) -{ +void VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack) { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); _captureCallBack = &callBack; - return 0; } -int32_t VideoCaptureImpl::DeRegisterCaptureCallback() -{ +void VideoCaptureImpl::DeRegisterCaptureCallback() { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); _captureCallBack = NULL; - return 0; - } -int32_t VideoCaptureImpl::SetCaptureDelay(int32_t delayMS) -{ +void VideoCaptureImpl::SetCaptureDelay(int32_t delayMS) { CriticalSectionScoped cs(&_apiCs); _captureDelay = delayMS; - return 0; } int32_t VideoCaptureImpl::CaptureDelay() { @@ -235,8 +264,6 @@ int32_t VideoCaptureImpl::IncomingFrame( "IncomingFrame width %d, height %d", (int) frameInfo.width, (int) frameInfo.height); - TickTime startProcessTime = TickTime::Now(); - CriticalSectionScoped cs(&_callBackCs); const int32_t width = frameInfo.width; @@ -305,39 +332,14 @@ int32_t VideoCaptureImpl::IncomingFrame( return -1; } - const uint32_t processTime = - (uint32_t)(TickTime::Now() - startProcessTime).Milliseconds(); - if
(processTime > 10) // If the process time is too long MJPG will not work well. - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, - "Too long processing time of Incoming frame: %ums", - (unsigned int) processTime); - } - return 0; } -int32_t VideoCaptureImpl::IncomingFrameI420( - const VideoFrameI420& video_frame, int64_t captureTime) { +int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame, + int64_t captureTime) { CriticalSectionScoped cs(&_callBackCs); - int size_y = video_frame.height * video_frame.y_pitch; - int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2); - int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2); - // TODO(mikhal): Can we use Swap here? This will do a memcpy. - int ret = _captureFrame.CreateFrame(size_y, video_frame.y_plane, - size_u, video_frame.u_plane, - size_v, video_frame.v_plane, - video_frame.width, video_frame.height, - video_frame.y_pitch, video_frame.u_pitch, - video_frame.v_pitch); - if (ret < 0) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "Failed to create I420VideoFrame"); - return -1; - } - - DeliverCapturedFrame(_captureFrame, captureTime); + DeliverCapturedFrame(*video_frame, captureTime); return 0; } @@ -358,12 +360,13 @@ int32_t VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation) { case kCameraRotate270: _rotateFrame = kRotate270; break; + default: + return -1; } return 0; } -int32_t VideoCaptureImpl::EnableFrameRateCallback(const bool enable) -{ +void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); _frameRateCallBack = enable; @@ -371,15 +374,12 @@ int32_t VideoCaptureImpl::EnableFrameRateCallback(const bool enable) { _lastFrameRateCallbackTime = TickTime::Now(); } - return 0; } -int32_t VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) -{ +void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); _noPictureAlarmCallBack = enable; - return 0; } void VideoCaptureImpl::UpdateFrameCount() diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h index 65605068f88f..f3a4c64cbd60 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h @@ -51,21 +51,29 @@ public: static DeviceInfo* CreateDeviceInfo(const int32_t id); + // Helpers for converting between (integral) degrees and + // VideoCaptureRotation values. Return 0 on success. + static int32_t RotationFromDegrees(int degrees, + VideoCaptureRotation* rotation); + static int32_t RotationInDegrees(VideoCaptureRotation rotation, + int* degrees); + // Implements Module declared functions. 
virtual int32_t ChangeUniqueId(const int32_t id); //Call backs - virtual int32_t RegisterCaptureDataCallback(VideoCaptureDataCallback& dataCallback); - virtual int32_t DeRegisterCaptureDataCallback(); - virtual int32_t RegisterCaptureCallback(VideoCaptureFeedBack& callBack); - virtual int32_t DeRegisterCaptureCallback(); + virtual void RegisterCaptureDataCallback( + VideoCaptureDataCallback& dataCallback); + virtual void DeRegisterCaptureDataCallback(); + virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack); + virtual void DeRegisterCaptureCallback(); - virtual int32_t SetCaptureDelay(int32_t delayMS); + virtual void SetCaptureDelay(int32_t delayMS); virtual int32_t CaptureDelay(); virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation); - virtual int32_t EnableFrameRateCallback(const bool enable); - virtual int32_t EnableNoPictureAlarm(const bool enable); + virtual void EnableFrameRateCallback(const bool enable); + virtual void EnableNoPictureAlarm(const bool enable); virtual const char* CurrentDeviceName() const; @@ -79,9 +87,9 @@ public: int32_t videoFrameLength, const VideoCaptureCapability& frameInfo, int64_t captureTime = 0); - virtual int32_t IncomingFrameI420( - const VideoFrameI420& video_frame, - int64_t captureTime = 0); + + virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame, + int64_t captureTime = 0); // Platform dependent virtual int32_t StartCapture(const VideoCaptureCapability& capability) diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_tests.isolate b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_tests.isolate index bafbd8b4c52b..30374ce4e4a3 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_tests.isolate +++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_tests.isolate @@ -21,16 +21,15 @@ 'variables': { 'command': [ '../../../testing/test_env.py', - '../../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../../testing/test_env.py', - '../../../tools/swarm_client/run_isolated.py', - '../../../tools/swarm_client/googletest/run_test_cases.py', - '../../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc index 09de9cf604a1..fee437c50502 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc @@ -392,13 +392,12 @@ int32_t DeviceInfoDS::GetWindowsCapability( VideoCaptureCapabilityWindows& windowsCapability) { ReadLockScoped cs(_apiLock); - std::map<int, VideoCaptureCapability*>::iterator item = - _captureCapabilities.find(capabilityIndex); - if (item == _captureCapabilities.end()) + if (capabilityIndex < 0 || static_cast<size_t>(capabilityIndex) >= + _captureCapabilitiesWindows.size()) { return -1; + } - windowsCapability = - *static_cast<VideoCaptureCapabilityWindows*>(item->second); + windowsCapability = _captureCapabilitiesWindows[capabilityIndex]; return 0; } @@ -407,13 +406,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap( { // Reset old capability list - for (std::map<int, VideoCaptureCapability*>::iterator it = - _captureCapabilities.begin(); - it != _captureCapabilities.end(); - ++it) { - delete it->second; - } -
_captureCapabilities.clear(); const int32_t deviceUniqueIdUTF8Length = @@ -489,7 +481,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap( return -1; } - int32_t index = 0; // Index in created _capabilities map // Check if the device support formattype == FORMAT_VideoInfo2 and FORMAT_VideoInfo. // Prefer FORMAT_VideoInfo since some cameras (ZureCam) has been seen having problem with MJPEG and FORMAT_VideoInfo2 // Interlace flag is only supported in FORMAT_VideoInfo2 @@ -556,8 +547,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap( && pmt->formattype == preferedVideoFormat) { - VideoCaptureCapabilityWindows* capability = - new VideoCaptureCapabilityWindows(); + VideoCaptureCapabilityWindows capability; int64_t avgTimePerFrame = 0; if (pmt->formattype == FORMAT_VideoInfo) @@ -565,9 +555,9 @@ VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat); assert(h); - capability->directShowCapabilityIndex = tmp; - capability->width = h->bmiHeader.biWidth; - capability->height = h->bmiHeader.biHeight; + capability.directShowCapabilityIndex = tmp; + capability.width = h->bmiHeader.biWidth; + capability.height = h->bmiHeader.biHeight; avgTimePerFrame = h->AvgTimePerFrame; } if (pmt->formattype == FORMAT_VideoInfo2) @@ -575,10 +565,10 @@ VIDEOINFOHEADER2* h = reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat); assert(h); - capability->directShowCapabilityIndex = tmp; - capability->width = h->bmiHeader.biWidth; - capability->height = h->bmiHeader.biHeight; - capability->interlaced = h->dwInterlaceFlags + capability.directShowCapabilityIndex = tmp; + capability.width = h->bmiHeader.biWidth; + capability.height = h->bmiHeader.biHeight; + capability.interlaced = h->dwInterlaceFlags & (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly); avgTimePerFrame = h->AvgTimePerFrame; @@ -587,11 +577,11 @@ int32_t DeviceInfoDS::CreateCapabilityMap( if (hrVC == S_OK) { LONGLONG *frameDurationList = NULL; - LONGLONG maxFPS; + LONGLONG maxFPS; long listSize; SIZE size; - size.cx = capability->width; - size.cy = capability->height; + size.cx = capability.width; + size.cy = capability.height; // GetMaxAvailableFrameRate doesn't return max frame rate always // eg: Logitech Notebook. This may be due to a bug in that API @@ -608,12 +598,12 @@ int32_t DeviceInfoDS::CreateCapabilityMap( // Initialize and check the returned list for null since // some broken drivers don't modify it.
if (hrVC == S_OK && listSize > 0 && frameDurationList && - 0 != (maxFPS = GetMaxOfFrameArray(frameDurationList, + 0 != (maxFPS = GetMaxOfFrameArray(frameDurationList, listSize))) { - capability->maxFPS = static_cast<int> (10000000 + capability.maxFPS = static_cast<int> (10000000 / maxFPS); - capability->supportFrameRateControl = true; + capability.supportFrameRateControl = true; } else // use existing method { @@ -621,61 +611,61 @@ _id, "GetMaxAvailableFrameRate NOT SUPPORTED"); if (avgTimePerFrame > 0) - capability->maxFPS = static_cast<int> (10000000 + capability.maxFPS = static_cast<int> (10000000 / avgTimePerFrame); else - capability->maxFPS = 0; + capability.maxFPS = 0; } } else // use existing method in case IAMVideoControl is not supported { if (avgTimePerFrame > 0) - capability->maxFPS = static_cast<int> (10000000 + capability.maxFPS = static_cast<int> (10000000 / avgTimePerFrame); else - capability->maxFPS = 0; + capability.maxFPS = 0; } // can't switch MEDIATYPE :~( if (pmt->subtype == MEDIASUBTYPE_I420) { - capability->rawType = kVideoI420; + capability.rawType = kVideoI420; } else if (pmt->subtype == MEDIASUBTYPE_IYUV) { - capability->rawType = kVideoIYUV; + capability.rawType = kVideoIYUV; } else if (pmt->subtype == MEDIASUBTYPE_RGB24) { - capability->rawType = kVideoRGB24; + capability.rawType = kVideoRGB24; } else if (pmt->subtype == MEDIASUBTYPE_YUY2) { - capability->rawType = kVideoYUY2; + capability.rawType = kVideoYUY2; } else if (pmt->subtype == MEDIASUBTYPE_RGB565) { - capability->rawType = kVideoRGB565; + capability.rawType = kVideoRGB565; } else if (pmt->subtype == MEDIASUBTYPE_MJPG) { - capability->rawType = kVideoMJPEG; + capability.rawType = kVideoMJPEG; } else if (pmt->subtype == MEDIASUBTYPE_dvsl || pmt->subtype == MEDIASUBTYPE_dvsd || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera { - capability->rawType = kVideoYUY2;// MS DV filter seems to create this type + capability.rawType = kVideoYUY2;// MS DV filter seems to create this type } else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards { - capability->rawType = kVideoUYVY; + capability.rawType = kVideoUYVY; } else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY.
http://en.wikipedia.org/wiki/YCbCr { WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, "Device support HDYC."); - capability->rawType = kVideoUYVY; + capability.rawType = kVideoUYVY; } else { @@ -685,22 +675,22 @@ int32_t DeviceInfoDS::CreateCapabilityMap( webrtc::kTraceVideoCapture, _id, "Device support unknown media type %ls, width %d, height %d", strGuid); - delete capability; continue; } // Get the expected capture delay from the static list - capability->expectedCaptureDelay + capability.expectedCaptureDelay = GetExpectedCaptureDelay(WindowsCaptureDelays, NoWindowsCaptureDelays, productId, - capability->width, - capability->height); - _captureCapabilities[index++] = capability; + capability.width, + capability.height); + _captureCapabilities.push_back(capability); + _captureCapabilitiesWindows.push_back(capability); WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "Camera capability, width:%d height:%d type:%d fps:%d", - capability->width, capability->height, - capability->rawType, capability->maxFPS); + capability.width, capability.height, + capability.rawType, capability.maxFPS); } _FreeMediaType(*pmt); pmt = NULL; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.h b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.h index 04f4eba7f31e..88adda857002 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.h @@ -29,8 +29,8 @@ struct VideoCaptureCapabilityWindows: public VideoCaptureCapability directShowCapabilityIndex = 0; supportFrameRateControl = false; } - }; + class DeviceInfoDS: public DeviceInfoImpl { public: @@ -55,7 +55,7 @@ public: char* productUniqueIdUTF8, uint32_t productUniqueIdUTF8Length); - /* + /* * Display OS /capture device specific settings dialog */ virtual int32_t @@ -98,7 +98,7 @@ protected: private: ICreateDevEnum* _dsDevEnum; bool _CoUninitializeIsRequired; - + std::vector<VideoCaptureCapabilityWindows> _captureCapabilitiesWindows; }; } // namespace videocapturemodule } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/i420/main/source/i420.gypi b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/i420/main/source/i420.gypi index b62f73014cf4..dc2adcef1516 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/i420/main/source/i420.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/i420/main/source/i420.gypi @@ -14,17 +14,6 @@ 'dependencies': [ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - '../interface', - '../../../interface', - '../../../../../../common_video/interface', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - '../../../../../../common_video/interface', - ], - }, 'sources': [ '../interface/i420.h', 'i420.cc', diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc index c24f8f6eaada..747557fdf9ee 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc @@ -156,8 +156,7 @@ class VideoProcessorIntegrationTest: public testing::Test { // Setup the TestConfig struct for processing of a clip in CIF resolution.
config_.input_filename = webrtc::test::ResourcePath("foreman_cif", "yuv"); - config_.output_filename = webrtc::test::OutputPath() + - "foreman_cif_short_video_codecs_test_framework_integrationtests.yuv"; + config_.output_filename = tmpnam(NULL); config_.frame_length_in_bytes = CalcBufferSize(kI420, kCIFWidth, kCIFHeight); config_.verbose = false; @@ -495,6 +494,9 @@ class VideoProcessorIntegrationTest: public testing::Test { EXPECT_GT(psnr_result.min, quality_metrics.minimum_min_psnr); EXPECT_GT(ssim_result.average, quality_metrics.minimum_avg_ssim); EXPECT_GT(ssim_result.min, quality_metrics.minimum_min_ssim); + if (remove(config_.output_filename.c_str()) != 0) { + fprintf(stderr, "Failed to remove temporary file!"); + } } }; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc index 9eda36e84390..dcd74790c292 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc @@ -26,14 +26,12 @@ using namespace webrtc; NormalAsyncTest::NormalAsyncTest() : -NormalTest("Async Normal Test 1", "A test of normal execution of the codec", - _testNo), +NormalTest("Async Normal Test 1", "A test of normal execution of the codec", 1), _decodeCompleteTime(0), _encodeCompleteTime(0), _encFrameCnt(0), _decFrameCnt(0), _requestKeyFrame(false), -_testNo(1), _appendNext(false), _missingFrames(false), _rttFrames(0), @@ -47,13 +45,13 @@ _waitForKey(false) NormalAsyncTest::NormalAsyncTest(uint32_t bitRate) : NormalTest("Async Normal Test 1", "A test of normal execution of the codec", - bitRate, _testNo), + bitRate, + 1), _decodeCompleteTime(0), _encodeCompleteTime(0), _encFrameCnt(0), _decFrameCnt(0), _requestKeyFrame(false), -_testNo(1), _appendNext(false), _missingFrames(false), _rttFrames(0), @@ -67,13 +65,12 @@ _waitForKey(false) NormalAsyncTest::NormalAsyncTest(std::string name, std::string description, unsigned int testNo) : -NormalTest(name, description, _testNo), +NormalTest(name, description, testNo), _decodeCompleteTime(0), _encodeCompleteTime(0), _encFrameCnt(0), _decFrameCnt(0), _requestKeyFrame(false), -_testNo(testNo), _lengthEncFrame(0), _appendNext(false), _missingFrames(false), @@ -88,13 +85,12 @@ _waitForKey(false) NormalAsyncTest::NormalAsyncTest(std::string name, std::string description, uint32_t bitRate, unsigned int testNo) : -NormalTest(name, description, bitRate, _testNo), +NormalTest(name, description, bitRate, testNo), _decodeCompleteTime(0), _encodeCompleteTime(0), _encFrameCnt(0), _decFrameCnt(0), _requestKeyFrame(false), -_testNo(testNo), _lengthEncFrame(0), _appendNext(false), _missingFrames(false), @@ -110,13 +106,12 @@ NormalAsyncTest::NormalAsyncTest(std::string name, std::string description, uint32_t bitRate, unsigned int testNo, unsigned int rttFrames) : -NormalTest(name, description, bitRate, _testNo), +NormalTest(name, description, bitRate, testNo), _decodeCompleteTime(0), _encodeCompleteTime(0), _encFrameCnt(0), _decFrameCnt(0), _requestKeyFrame(false), -_testNo(testNo), _lengthEncFrame(0), _appendNext(false), _missingFrames(false), diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h index d2d17eebc7f5..1e62534acab9 100644 ---
a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h @@ -118,7 +118,6 @@ protected: int _encFrameCnt; int _decFrameCnt; bool _requestKeyFrame; - unsigned int _testNo; unsigned int _lengthEncFrame; FrameQueueTuple* _frameToDecode; bool _appendNext; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/test_framework.gypi b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/test_framework.gypi index fe509803b9e8..0d046d102a0f 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/test_framework.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/test_framework/test_framework.gypi @@ -19,19 +19,6 @@ '<(webrtc_root)/test/metrics.gyp:metrics', '<(webrtc_root)/test/test.gyp:test_support', ], - - 'include_dirs': [ - '../interface', - '<(DEPTH)/testing/gtest/include', - '../../../../common_video/interface', - ], - - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - ], - }, - 'sources': [ # header files 'benchmark.h', diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc index 62d9ae613cf6..a92a87afc086 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc @@ -36,6 +36,13 @@ DefaultTemporalLayers::DefaultTemporalLayers(int numberOfTemporalLayers, memset(temporal_pattern_, 0, sizeof(temporal_pattern_)); } +int DefaultTemporalLayers::CurrentLayerId() const { + assert(temporal_ids_length_ > 0); + int index = pattern_idx_ % temporal_ids_length_; + assert(index >= 0); + return temporal_ids_[index]; + } + bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit, int max_bitrate_kbit, int framerate, @@ -247,8 +254,7 @@ void DefaultTemporalLayers::PopulateCodecSpecific( vp8_info->temporalIdx = 0; vp8_info->layerSync = true; } else { - vp8_info->temporalIdx = temporal_ids_ - [pattern_idx_ % temporal_ids_length_]; + vp8_info->temporalIdx = CurrentLayerId(); TemporalReferences temporal_reference = temporal_pattern_[pattern_idx_ % temporal_pattern_length_]; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h index 99b0e9569688..61f281f2b11d 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h @@ -37,6 +37,8 @@ class DefaultTemporalLayers : public TemporalLayers { virtual void FrameEncoded(unsigned int size, uint32_t timestamp) {} + virtual int CurrentLayerId() const; + private: enum TemporalReferences { // For 1 layer case: reference all (last, golden, and alt ref), but only diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h index 8b97b3b46c58..c2cefdd94e78 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h @@ -18,11 +18,11 @@ namespace webrtc { // Ratio allocation between temporal 
streams: // Values as required for the VP8 codec (accumulating). static const float - kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = { - {1.0f, 0, 0, 0}, // 1 layer - {0.6f, 1.0f , 0 , 0}, // 2 layers {60%, 40%} - {0.4f, 0.6f , 1.0f, 0}, // 3 layers {40%, 20%, 40%} - {0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%} + kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = { + {1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer + {0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%} + {0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%} + {0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%} }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc new file mode 100644 index 000000000000..f16c7568137d --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc @@ -0,0 +1,268 @@ +/* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. +* +* Use of this source code is governed by a BSD-style license +* that can be found in the LICENSE file in the root of the source +* tree. An additional intellectual property rights grant can be found +* in the file PATENTS. All contributing project authors may +* be found in the AUTHORS file in the root of the source tree. +*/ + +#include <assert.h> +#include <algorithm> + +#include "vpx/vpx_encoder.h" +#include "vpx/vp8cx.h" +#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" +#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h" +#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" + +// This file implements logic to adapt the number of temporal layers based on +// input frame rate, in order to avoid the base layer being relayed at an +// unacceptably low framerate. +namespace webrtc { +namespace { +enum { + kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | + VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF, + + kTemporalUpdateGolden = + VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST, + + kTemporalUpdateGoldenWithoutDependency = + kTemporalUpdateGolden | VP8_EFLAG_NO_REF_GF, + + kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST, + + kTemporalUpdateAltrefWithoutDependency = + kTemporalUpdateAltref | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_GF, + + kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | + VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY, + + kTemporalUpdateNoneNoRefAltref = kTemporalUpdateNone | VP8_EFLAG_NO_REF_ARF, + + kTemporalUpdateNoneNoRefGoldenRefAltRef = + VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | + VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY, + + kTemporalUpdateGoldenWithoutDependencyRefAltRef = + VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST, + + kTemporalUpdateLastRefAltRef = + VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF, + + kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST, + + kTemporalUpdateLastAndGoldenRefAltRef = + VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF, + + kTemporalUpdateLastRefAll = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_GF, +}; + +int CalculateNumberOfTemporalLayers(int current_temporal_layers, + int input_fps) { + if (input_fps >= 24) { + return 3; + } + if (input_fps >= 20 && current_temporal_layers >= 3) { + // Keep doing 3 temporal layers until we go below 20fps.
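+ // For example (values taken from the thresholds in this function): an input of 25 fps selects 3 layers; a later dip to 21 fps keeps 3 layers because current_temporal_layers is already 3, and only an input below 20 fps drops the count to 2.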
+ return 3; + } + if (input_fps >= 10) { + return 2; + } + if (input_fps > 8 && current_temporal_layers >= 2) { + // Keep doing 2 temporal layers until we go below 8fps. + return 2; + } + return 1; +} + +class RealTimeTemporalLayers : public TemporalLayers { + public: + RealTimeTemporalLayers(int max_num_temporal_layers, + uint8_t initial_tl0_pic_idx) + : temporal_layers_(1), + max_temporal_layers_(max_num_temporal_layers), + tl0_pic_idx_(initial_tl0_pic_idx), + frame_counter_(static_cast<unsigned int>(-1)), + timestamp_(0), + last_base_layer_sync_(0), + layer_ids_length_(0), + layer_ids_(NULL), + encode_flags_length_(0), + encode_flags_(NULL) { + assert(max_temporal_layers_ >= 1); + assert(max_temporal_layers_ <= 3); + } + + virtual ~RealTimeTemporalLayers() {} + + virtual bool ConfigureBitrates(int bitrate_kbit, + int max_bitrate_kbit, + int framerate, + vpx_codec_enc_cfg_t* cfg) { + temporal_layers_ = + CalculateNumberOfTemporalLayers(temporal_layers_, framerate); + temporal_layers_ = std::min(temporal_layers_, max_temporal_layers_); + assert(temporal_layers_ >= 1 && temporal_layers_ <= 3); + + cfg->ts_number_layers = temporal_layers_; + for (int tl = 0; tl < temporal_layers_; ++tl) { + cfg->ts_target_bitrate[tl] = + bitrate_kbit * kVp8LayerRateAlloction[temporal_layers_ - 1][tl]; + } + + switch (temporal_layers_) { + case 1: { + static const unsigned int layer_ids[] = {0u}; + layer_ids_ = layer_ids; + layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids); + + static const int encode_flags[] = {kTemporalUpdateLastRefAll}; + encode_flags_length_ = sizeof(encode_flags) / sizeof(*encode_flags); + encode_flags_ = encode_flags; + + cfg->ts_rate_decimator[0] = 1; + cfg->ts_periodicity = layer_ids_length_; + } break; + + case 2: { + static const unsigned int layer_ids[] = {0u, 1u}; + layer_ids_ = layer_ids; + layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids); + + static const int encode_flags[] = { + kTemporalUpdateLastAndGoldenRefAltRef, + kTemporalUpdateGoldenWithoutDependencyRefAltRef, + kTemporalUpdateLastRefAltRef, kTemporalUpdateGoldenRefAltRef, + kTemporalUpdateLastRefAltRef, kTemporalUpdateGoldenRefAltRef, + kTemporalUpdateLastRefAltRef, kTemporalUpdateNone + }; + encode_flags_length_ = sizeof(encode_flags) / sizeof(*encode_flags); + encode_flags_ = encode_flags; + + cfg->ts_rate_decimator[0] = 2; + cfg->ts_rate_decimator[1] = 1; + cfg->ts_periodicity = layer_ids_length_; + } break; + + case 3: { + static const unsigned int layer_ids[] = {0u, 2u, 1u, 2u}; + layer_ids_ = layer_ids; + layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids); + + static const int encode_flags[] = { + kTemporalUpdateLastAndGoldenRefAltRef, + kTemporalUpdateNoneNoRefGoldenRefAltRef, + kTemporalUpdateGoldenWithoutDependencyRefAltRef, kTemporalUpdateNone, + kTemporalUpdateLastRefAltRef, kTemporalUpdateNone, + kTemporalUpdateGoldenRefAltRef, kTemporalUpdateNone + }; + encode_flags_length_ = sizeof(encode_flags) / sizeof(*encode_flags); + encode_flags_ = encode_flags; + + cfg->ts_rate_decimator[0] = 4; + cfg->ts_rate_decimator[1] = 2; + cfg->ts_rate_decimator[2] = 1; + cfg->ts_periodicity = layer_ids_length_; + } break; + + default: + assert(false); + return false; + } + memcpy( + cfg->ts_layer_id, layer_ids_, sizeof(unsigned int) * layer_ids_length_); + return true; + } + + virtual int EncodeFlags(uint32_t timestamp) { + frame_counter_++; + return CurrentEncodeFlags(); + } + + int CurrentEncodeFlags() const { + assert(encode_flags_length_ > 0 && encode_flags_ != NULL); + int index = frame_counter_ %
encode_flags_length_; + assert(index >= 0 && index < encode_flags_length_); + return encode_flags_[index]; + } + + virtual int CurrentLayerId() const { + assert(layer_ids_length_ > 0 && layer_ids_ != NULL); + int index = frame_counter_ % layer_ids_length_; + assert(index >= 0 && index < layer_ids_length_); + return layer_ids_[index]; + } + + virtual void PopulateCodecSpecific(bool base_layer_sync, + CodecSpecificInfoVP8* vp8_info, + uint32_t timestamp) { + assert(temporal_layers_ > 0); + + if (temporal_layers_ == 1) { + vp8_info->temporalIdx = kNoTemporalIdx; + vp8_info->layerSync = false; + vp8_info->tl0PicIdx = kNoTl0PicIdx; + } else { + if (base_layer_sync) { + vp8_info->temporalIdx = 0; + vp8_info->layerSync = true; + } else { + vp8_info->temporalIdx = CurrentLayerId(); + int temporal_reference = CurrentEncodeFlags(); + + if (temporal_reference == kTemporalUpdateAltrefWithoutDependency || + temporal_reference == kTemporalUpdateGoldenWithoutDependency || + temporal_reference == + kTemporalUpdateGoldenWithoutDependencyRefAltRef || + temporal_reference == kTemporalUpdateNoneNoRefGoldenRefAltRef || + (temporal_reference == kTemporalUpdateNone && + temporal_layers_ == 4)) { + vp8_info->layerSync = true; + } else { + vp8_info->layerSync = false; + } + } + if (last_base_layer_sync_ && vp8_info->temporalIdx != 0) { + // Regardless of pattern the frame after a base layer sync will always + // be a layer sync. + vp8_info->layerSync = true; + } + if (vp8_info->temporalIdx == 0 && timestamp != timestamp_) { + timestamp_ = timestamp; + tl0_pic_idx_++; + } + last_base_layer_sync_ = base_layer_sync; + vp8_info->tl0PicIdx = tl0_pic_idx_; + } + } + + void FrameEncoded(unsigned int size, uint32_t timestamp) {} + + private: + int temporal_layers_; + int max_temporal_layers_; + + int tl0_pic_idx_; + unsigned int frame_counter_; + uint32_t timestamp_; + bool last_base_layer_sync_; + + // Pattern of temporal layer ids. + int layer_ids_length_; + const unsigned int* layer_ids_; + + // Pattern of encode flags. + int encode_flags_length_; + const int* encode_flags_; +}; +} // namespace + +TemporalLayers* RealTimeTemporalLayersFactory::Create( + int max_temporal_layers, + uint8_t initial_tl0_pic_idx) const { + return new RealTimeTemporalLayers(max_temporal_layers, initial_tl0_pic_idx); +} +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h index 9df88b9be4f6..7ca4840c3109 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h @@ -24,6 +24,8 @@ struct CodecSpecificInfoVP8; class TemporalLayers { public: + // Factory for TemporalLayer strategy. Default behaviour is a fixed pattern + // of temporal layers. See default_temporal_layers.cc struct Factory { Factory() {} virtual ~Factory() {} @@ -47,6 +49,17 @@ class TemporalLayers { uint32_t timestamp) = 0; virtual void FrameEncoded(unsigned int size, uint32_t timestamp) = 0; + + virtual int CurrentLayerId() const = 0; +}; + +// Factory for a temporal layers strategy that adaptively changes the number of +// layers based on input framerate so that the base layer has an acceptable +// framerate. 
See realtime_temporal_layers.cc +struct RealTimeTemporalLayersFactory : TemporalLayers::Factory { + virtual ~RealTimeTemporalLayersFactory() {} + virtual TemporalLayers* Create(int num_temporal_layers, + uint8_t initial_tl0_pic_idx) const; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp index 97295a59d0fd..3e3a4600b751 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp +++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp @@ -19,12 +19,6 @@ '<(webrtc_root)/modules/video_coding/utility/video_coding_utility.gyp:video_coding_utility', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - 'include', - '<(webrtc_root)/common_video/interface', - '<(webrtc_root)/modules/video_coding/codecs/interface', - '<(webrtc_root)/modules/interface', - ], 'conditions': [ ['build_libvpx==1', { 'dependencies': [ @@ -38,13 +32,6 @@ }, }], ], - 'direct_dependent_settings': { - 'include_dirs': [ - 'include', - '<(webrtc_root)/common_video/interface', - '<(webrtc_root)/modules/video_coding/codecs/interface', - ], - }, 'sources': [ 'reference_picture_selection.h', 'reference_picture_selection.cc', @@ -53,6 +40,7 @@ 'vp8_impl.cc', 'default_temporal_layers.cc', 'default_temporal_layers.h', + 'realtime_temporal_layers.cc', 'temporal_layers.h', ], # Disable warnings to enable Win64 build, issue 1323. diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding.h index 4f04ce16ee4c..fa462c7f0063 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding.h @@ -21,6 +21,7 @@ namespace webrtc { class Clock; +class EncodedImageCallback; class VideoEncoder; class VideoDecoder; struct CodecSpecificInfo; @@ -376,6 +377,18 @@ public: virtual int32_t RegisterReceiveStatisticsCallback( VCMReceiveStatisticsCallback* receiveStats) = 0; + // Register a decoder timing callback which will be called to deliver + // information about the timing of the decoder in the receiving side of the + // VCM, for instance the current and maximum frame decode latency. + // + // Input: + // - decoderTiming : The callback object to register. + // + // Return value : VCM_OK, on success. + // < 0, on error. + virtual int32_t RegisterDecoderTimingCallback( + VCMDecoderTimingCallback* decoderTiming) = 0; + // Register a frame type request callback. This callback will be called when the // module needs to request specific frame types from the send side. // @@ -582,6 +595,20 @@ public: // Disables recording of debugging information. virtual int StopDebugRecording() = 0; + + // Lets the sender suspend video when the rate drops below + // |threshold_bps|, and turns back on when the rate goes back up above + // |threshold_bps| + |window_bps|. + virtual void SuspendBelowMinBitrate() = 0; + + // Returns true if SuspendBelowMinBitrate is engaged and the video has been + // suspended due to bandwidth limitations; otherwise false. 
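+ // A typical caller sequence (hypothetical sketch, not part of this API's contract): call SuspendBelowMinBitrate() once after initialization, then check VideoSuspended() before handing each captured frame to the encoder and drop frames while it returns true.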
+ virtual bool VideoSuspended() const = 0; + + virtual void RegisterPreDecodeImageCallback( + EncodedImageCallback* observer) = 0; + virtual void RegisterPostEncodeImageCallback( + EncodedImageCallback* post_encode_callback) = 0; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding_defines.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding_defines.h index ae1acd31000a..fab91afd3285 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding_defines.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/interface/video_coding_defines.h @@ -118,6 +118,21 @@ class VCMReceiveStatisticsCallback { } }; +// Callback class used for informing the user of decode timing info. +class VCMDecoderTimingCallback { + public: + virtual void OnDecoderTiming(int decode_ms, + int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms) = 0; + + protected: + virtual ~VCMDecoderTimingCallback() {} +}; + // Callback class used for telling the user about how to configure the FEC, // and the rates sent the last second is returned to the VCM. class VCMProtectionCallback { diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_database.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_database.cc index fcb00b287b41..6fe1727bf090 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_database.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_database.cc @@ -338,18 +338,13 @@ bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) { } break; case kVideoCodecGeneric: - if (memcmp(&new_send_codec.codecSpecific.Generic, - &send_codec_.codecSpecific.Generic, - sizeof(new_send_codec.codecSpecific.Generic)) != - 0) { - return true; - } break; // Known codecs without payload-specifics case kVideoCodecI420: case kVideoCodecRED: case kVideoCodecULPFEC: case kVideoCodecH264: + // TODO(jesup): analyze codec config for H264 break; // Unknown codec type, reset just to be sure. case kVideoCodecUnknown: diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.cc index 9fee00bb4d56..a46225881362 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.cc @@ -15,10 +15,13 @@ namespace webrtc { +// The first kIgnoredSampleCount samples will be ignored. 
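+// (Presumably the first few samples cover one-off codec start-up costs, which would otherwise inflate the max-decode-time estimate produced by this filter.)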
+static const int32_t kIgnoredSampleCount = 5; + VCMCodecTimer::VCMCodecTimer() : _filteredMax(0), -_firstDecodeTime(true), +_ignoredSampleCount(0), _shortMax(0), _history() { @@ -35,7 +38,7 @@ int32_t VCMCodecTimer::StopTimer(int64_t startTimeMs, int64_t nowMs) void VCMCodecTimer::Reset() { _filteredMax = 0; - _firstDecodeTime = true; + _ignoredSampleCount = 0; _shortMax = 0; for (int i=0; i < MAX_HISTORY_SIZE; i++) { @@ -47,14 +50,14 @@ void VCMCodecTimer::Reset() // Update the max-value filter void VCMCodecTimer::MaxFilter(int32_t decodeTime, int64_t nowMs) { - if (!_firstDecodeTime) + if (_ignoredSampleCount >= kIgnoredSampleCount) { UpdateMaxHistory(decodeTime, nowMs); ProcessHistory(nowMs); } else { - _firstDecodeTime = false; + _ignoredSampleCount++; } } diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.h index e56aa6df80db..9268e8d8171b 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/codec_timer.h @@ -18,7 +18,7 @@ namespace webrtc { // MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds -#define MAX_HISTORY_SIZE 20 +#define MAX_HISTORY_SIZE 10 #define SHORT_FILTER_MS 1000 class VCMShortMaxSample @@ -50,9 +50,10 @@ private: void ProcessHistory(int64_t nowMs); int32_t _filteredMax; - bool _firstDecodeTime; + // The number of samples ignored so far. + int32_t _ignoredSampleCount; int32_t _shortMax; - VCMShortMaxSample _history[MAX_HISTORY_SIZE]; + VCMShortMaxSample _history[MAX_HISTORY_SIZE]; }; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state.cc index d4ada0fc1c64..134008e2b56f 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state.cc @@ -133,7 +133,12 @@ void VCMDecodingState::UpdateSyncState(const VCMFrameBuffer* frame) { // Sync will be broken if continuity is true for layers but not for the // other methods (PictureId and SeqNum). if (UsingPictureId(frame)) { - full_sync_ = ContinuousPictureId(frame->PictureId()); + // First check for a valid tl0PicId. + if (frame->Tl0PicId() - tl0_pic_id_ > 1) { + full_sync_ = false; + } else { + full_sync_ = ContinuousPictureId(frame->PictureId()); + } } else { full_sync_ = ContinuousSeqNum(static_cast( frame->GetLowSeqNum())); @@ -157,20 +162,21 @@ bool VCMDecodingState::ContinuousFrame(const VCMFrameBuffer* frame) const { // When in the initial state we always require a key frame to start decoding. if (in_initial_state_) return false; - - if (!ContinuousLayer(frame->TemporalId(), frame->Tl0PicId())) { - // Base layers are not continuous or temporal layers are inactive. - // In the presence of temporal layers, check for Picture ID/sequence number - // continuity if sync can be restored by this frame. - if (!full_sync_ && !frame->LayerSync()) - return false; - else if (UsingPictureId(frame)) { - return ContinuousPictureId(frame->PictureId()); - } else { - return ContinuousSeqNum(static_cast(frame->GetLowSeqNum())); - } + if (ContinuousLayer(frame->TemporalId(), frame->Tl0PicId())) + return true; + // tl0picId is either not used, or should remain unchanged. + if (frame->Tl0PicId() != tl0_pic_id_) + return false; + // Base layers are not continuous or temporal layers are inactive. 
+ // In the presence of temporal layers, check for Picture ID/sequence number + // continuity if sync can be restored by this frame. + if (!full_sync_ && !frame->LayerSync()) + return false; + if (UsingPictureId(frame)) { + return ContinuousPictureId(frame->PictureId()); + } else { + return ContinuousSeqNum(static_cast(frame->GetLowSeqNum())); } - return true; } bool VCMDecodingState::ContinuousPictureId(int picture_id) const { diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc index 48c8c79ee41e..10f1d6e4dd70 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc @@ -31,49 +31,49 @@ TEST(TestDecodingState, FrameContinuity) { // Check that makes decision based on correct method. VCMFrameBuffer frame; VCMFrameBuffer frame_key; - VCMPacket* packet = new VCMPacket(); - packet->isFirstPacket = true; - packet->timestamp = 1; - packet->seqNum = 0xffff; - packet->frameType = kVideoFrameDelta; - packet->codecSpecificHeader.codec = kRtpVideoVp8; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F; + VCMPacket packet; + packet.isFirstPacket = true; + packet.timestamp = 1; + packet.seqNum = 0xffff; + packet.frameType = kVideoFrameDelta; + packet.codecSpecificHeader.codec = kRtpVideoVp8; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F; FrameData frame_data; frame_data.rtt_ms = 0; frame_data.rolling_average_packets_per_frame = -1; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); // Always start with a key frame. dec_state.Reset(); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); - packet->frameType = kVideoFrameKey; - EXPECT_LE(0, frame_key.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameKey; + EXPECT_LE(0, frame_key.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame_key)); dec_state.SetState(&frame); frame.Reset(); - packet->frameType = kVideoFrameDelta; + packet.frameType = kVideoFrameDelta; // Use pictureId - packet->isFirstPacket = false; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.isFirstPacket = false; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); frame.Reset(); - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0; - packet->seqNum = 10; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0; + packet.seqNum = 10; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); // Use sequence numbers. 
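// (With pictureId set to kNoPictureId below, continuity falls back to packet sequence numbers, which must tolerate 16-bit wrap-around: a state at 0xffff followed by seqNum 0x0000 still counts as continuous.)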
- packet->codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId; frame.Reset(); - packet->seqNum = dec_state.sequence_num() - 1u; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.seqNum = dec_state.sequence_num() - 1u; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); frame.Reset(); - packet->seqNum = dec_state.sequence_num() + 1u; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.seqNum = dec_state.sequence_num() + 1u; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); // Insert another packet to this frame - packet->seqNum++; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.seqNum++; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); // Verify wrap. EXPECT_LE(dec_state.sequence_num(), 0xffff); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); @@ -82,81 +82,80 @@ TEST(TestDecodingState, FrameContinuity) { // Insert packet with temporal info. dec_state.Reset(); frame.Reset(); - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0; - packet->seqNum = 1; - packet->timestamp = 1; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0; + packet.seqNum = 1; + packet.timestamp = 1; EXPECT_TRUE(dec_state.full_sync()); - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); EXPECT_TRUE(dec_state.full_sync()); frame.Reset(); // 1 layer up - still good. - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1; - packet->seqNum = 2; - packet->timestamp = 2; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1; + packet.seqNum = 2; + packet.timestamp = 2; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); dec_state.SetState(&frame); EXPECT_TRUE(dec_state.full_sync()); frame.Reset(); // Lost non-base layer packet => should update sync parameter. - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3; - packet->seqNum = 4; - packet->timestamp = 4; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 3; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 3; + packet.seqNum = 4; + packet.timestamp = 4; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); // Now insert the next non-base layer (belonging to a next tl0PicId). 
frame.Reset(); - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4; - packet->seqNum = 5; - packet->timestamp = 5; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 4; + packet.seqNum = 5; + packet.timestamp = 5; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); // Checking continuity and not updating the state - this should not trigger // an update of sync state. EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); EXPECT_TRUE(dec_state.full_sync()); // Next base layer (dropped interim non-base layers) - should update sync. frame.Reset(); - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5; - packet->seqNum = 6; - packet->timestamp = 6; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 5; + packet.seqNum = 6; + packet.timestamp = 6; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); dec_state.SetState(&frame); EXPECT_FALSE(dec_state.full_sync()); // Check wrap for temporal layers. frame.Reset(); - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6; - packet->seqNum = 7; - packet->timestamp = 7; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 6; + packet.seqNum = 7; + packet.timestamp = 7; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); EXPECT_FALSE(dec_state.full_sync()); frame.Reset(); - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 7; - packet->seqNum = 8; - packet->timestamp = 8; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 7; + packet.seqNum = 8; + packet.timestamp = 8; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); // The current frame is not continuous dec_state.SetState(&frame); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); - delete packet; } TEST(TestDecodingState, UpdateOldPacket) { @@ -164,45 +163,43 @@ TEST(TestDecodingState, UpdateOldPacket) { // Update only if zero size and newer than previous. // Should only update if the timeStamp match. 
VCMFrameBuffer frame; - VCMPacket* packet = new VCMPacket(); - packet->timestamp = 1; - packet->seqNum = 1; - packet->frameType = kVideoFrameDelta; + VCMPacket packet; + packet.timestamp = 1; + packet.seqNum = 1; + packet.frameType = kVideoFrameDelta; FrameData frame_data; frame_data.rtt_ms = 0; frame_data.rolling_average_packets_per_frame = -1; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); EXPECT_EQ(dec_state.sequence_num(), 1); // Insert an empty packet that does not belong to the same frame. // => Sequence num should be the same. - packet->timestamp = 2; - dec_state.UpdateOldPacket(packet); + packet.timestamp = 2; + dec_state.UpdateOldPacket(&packet); EXPECT_EQ(dec_state.sequence_num(), 1); // Now insert empty packet belonging to the same frame. - packet->timestamp = 1; - packet->seqNum = 2; - packet->frameType = kFrameEmpty; - packet->sizeBytes = 0; - dec_state.UpdateOldPacket(packet); + packet.timestamp = 1; + packet.seqNum = 2; + packet.frameType = kFrameEmpty; + packet.sizeBytes = 0; + dec_state.UpdateOldPacket(&packet); EXPECT_EQ(dec_state.sequence_num(), 2); // Now insert delta packet belonging to the same frame. - packet->timestamp = 1; - packet->seqNum = 3; - packet->frameType = kVideoFrameDelta; - packet->sizeBytes = 1400; - dec_state.UpdateOldPacket(packet); + packet.timestamp = 1; + packet.seqNum = 3; + packet.frameType = kVideoFrameDelta; + packet.sizeBytes = 1400; + dec_state.UpdateOldPacket(&packet); EXPECT_EQ(dec_state.sequence_num(), 3); // Insert a packet belonging to an older timestamp - should not update the // sequence number. - packet->timestamp = 0; - packet->seqNum = 4; - packet->frameType = kFrameEmpty; - packet->sizeBytes = 0; - dec_state.UpdateOldPacket(packet); + packet.timestamp = 0; + packet.seqNum = 4; + packet.frameType = kFrameEmpty; + packet.sizeBytes = 0; + dec_state.UpdateOldPacket(&packet); EXPECT_EQ(dec_state.sequence_num(), 3); - - delete packet; } TEST(TestDecodingState, MultiLayerBehavior) { @@ -212,88 +209,88 @@ TEST(TestDecodingState, MultiLayerBehavior) { // Set state for current frames. // tl0PicIdx 0, temporal id 0. VCMFrameBuffer frame; - VCMPacket* packet = new VCMPacket(); - packet->frameType = kVideoFrameDelta; - packet->codecSpecificHeader.codec = kRtpVideoVp8; - packet->timestamp = 0; - packet->seqNum = 0; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0; + VCMPacket packet; + packet.frameType = kVideoFrameDelta; + packet.codecSpecificHeader.codec = kRtpVideoVp8; + packet.timestamp = 0; + packet.seqNum = 0; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0; FrameData frame_data; frame_data.rtt_ms = 0; frame_data.rolling_average_packets_per_frame = -1; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); // tl0PicIdx 0, temporal id 1. 
frame.Reset(); - packet->timestamp = 1; - packet->seqNum = 1; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.timestamp = 1; + packet.seqNum = 1; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); dec_state.SetState(&frame); EXPECT_TRUE(dec_state.full_sync()); // Lost tl0PicIdx 0, temporal id 2. // Insert tl0PicIdx 0, temporal id 3. frame.Reset(); - packet->timestamp = 3; - packet->seqNum = 3; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.timestamp = 3; + packet.seqNum = 3; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 3; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 3; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); dec_state.SetState(&frame); EXPECT_FALSE(dec_state.full_sync()); // Insert next base layer frame.Reset(); - packet->timestamp = 4; - packet->seqNum = 4; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.timestamp = 4; + packet.seqNum = 4; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 4; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); dec_state.SetState(&frame); EXPECT_FALSE(dec_state.full_sync()); // Insert key frame - should update sync value. // A key frame is always a base layer. 
frame.Reset(); - packet->frameType = kVideoFrameKey; - packet->isFirstPacket = 1; - packet->timestamp = 5; - packet->seqNum = 5; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameKey; + packet.isFirstPacket = 1; + packet.timestamp = 5; + packet.seqNum = 5; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 5; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); dec_state.SetState(&frame); EXPECT_TRUE(dec_state.full_sync()); // After sync, a continuous PictureId is required // (continuous base layer is not enough ) frame.Reset(); - packet->frameType = kVideoFrameDelta; - packet->timestamp = 6; - packet->seqNum = 6; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameDelta; + packet.timestamp = 6; + packet.seqNum = 6; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 6; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); EXPECT_TRUE(dec_state.full_sync()); frame.Reset(); - packet->frameType = kVideoFrameDelta; - packet->isFirstPacket = 1; - packet->timestamp = 8; - packet->seqNum = 8; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 8; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameDelta; + packet.isFirstPacket = 1; + packet.timestamp = 8; + packet.seqNum = 8; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 8; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); EXPECT_TRUE(dec_state.full_sync()); dec_state.SetState(&frame); @@ -301,15 +298,15 @@ TEST(TestDecodingState, MultiLayerBehavior) { // Insert a non-ref frame - should update sync value. 
frame.Reset(); - packet->frameType = kVideoFrameDelta; - packet->isFirstPacket = 1; - packet->timestamp = 9; - packet->seqNum = 9; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 9; - packet->codecSpecificHeader.codecHeader.VP8.layerSync = true; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameDelta; + packet.isFirstPacket = 1; + packet.timestamp = 9; + packet.seqNum = 9; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 9; + packet.codecSpecificHeader.codecHeader.VP8.layerSync = true; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); EXPECT_TRUE(dec_state.full_sync()); @@ -321,47 +318,45 @@ TEST(TestDecodingState, MultiLayerBehavior) { // Base layer. frame.Reset(); dec_state.Reset(); - packet->frameType = kVideoFrameDelta; - packet->isFirstPacket = 1; - packet->markerBit = 1; - packet->timestamp = 0; - packet->seqNum = 0; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0; - packet->codecSpecificHeader.codecHeader.VP8.layerSync = false; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameDelta; + packet.isFirstPacket = 1; + packet.markerBit = 1; + packet.timestamp = 0; + packet.seqNum = 0; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0; + packet.codecSpecificHeader.codecHeader.VP8.layerSync = false; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); EXPECT_TRUE(dec_state.full_sync()); // Layer 2 - 2 packets (insert one, lose one). 
frame.Reset(); - packet->frameType = kVideoFrameDelta; - packet->isFirstPacket = 1; - packet->markerBit = 0; - packet->timestamp = 1; - packet->seqNum = 1; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1; - packet->codecSpecificHeader.codecHeader.VP8.layerSync = true; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameDelta; + packet.isFirstPacket = 1; + packet.markerBit = 0; + packet.timestamp = 1; + packet.seqNum = 1; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1; + packet.codecSpecificHeader.codecHeader.VP8.layerSync = true; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); // Layer 1 frame.Reset(); - packet->frameType = kVideoFrameDelta; - packet->isFirstPacket = 1; - packet->markerBit = 1; - packet->timestamp = 2; - packet->seqNum = 3; - packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; - packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1; - packet->codecSpecificHeader.codecHeader.VP8.pictureId = 2; - packet->codecSpecificHeader.codecHeader.VP8.layerSync = true; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + packet.frameType = kVideoFrameDelta; + packet.isFirstPacket = 1; + packet.markerBit = 1; + packet.timestamp = 2; + packet.seqNum = 3; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 2; + packet.codecSpecificHeader.codecHeader.VP8.layerSync = true; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); EXPECT_TRUE(dec_state.full_sync()); - - delete packet; } TEST(TestDecodingState, DiscontinuousPicIdContinuousSeqNum) { @@ -402,22 +397,53 @@ TEST(TestDecodingState, OldInput) { // Identify packets belonging to old frames/packets. // Set state for current frames. 
VCMFrameBuffer frame; - VCMPacket* packet = new VCMPacket(); - packet->timestamp = 10; - packet->seqNum = 1; + VCMPacket packet; + packet.timestamp = 10; + packet.seqNum = 1; FrameData frame_data; frame_data.rtt_ms = 0; frame_data.rolling_average_packets_per_frame = -1; - EXPECT_LE(0, frame.InsertPacket(*packet, 0, kNoErrors, frame_data)); + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); dec_state.SetState(&frame); - packet->timestamp = 9; - EXPECT_TRUE(dec_state.IsOldPacket(packet)); + packet.timestamp = 9; + EXPECT_TRUE(dec_state.IsOldPacket(&packet)); // Check for old frame frame.Reset(); - frame.InsertPacket(*packet, 0, kNoErrors, frame_data); + frame.InsertPacket(packet, 0, kNoErrors, frame_data); EXPECT_TRUE(dec_state.IsOldFrame(&frame)); - - - delete packet; } + +TEST(TestDecodingState, PictureIdRepeat) { + VCMDecodingState dec_state; + VCMFrameBuffer frame; + VCMPacket packet; + packet.frameType = kVideoFrameDelta; + packet.codecSpecificHeader.codec = kRtpVideoVp8; + packet.timestamp = 0; + packet.seqNum = 0; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0; + packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0; + FrameData frame_data; + frame_data.rtt_ms = 0; + frame_data.rolling_average_packets_per_frame = -1; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); + dec_state.SetState(&frame); + // tl0PicIdx 0, temporal id 1. + frame.Reset(); + ++packet.timestamp; + ++packet.seqNum; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx++; + packet.codecSpecificHeader.codecHeader.VP8.pictureId++; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); + EXPECT_TRUE(dec_state.ContinuousFrame(&frame)); + frame.Reset(); + // Testing only gap in tl0PicIdx when tl0PicIdx is continuous. 
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx += 3; + packet.codecSpecificHeader.codecHeader.VP8.temporalIdx++; + packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1; + EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data)); + EXPECT_FALSE(dec_state.ContinuousFrame(&frame)); +} + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/encoded_frame.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/encoded_frame.cc index 5ce83177c44a..3e981cf7ee04 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/encoded_frame.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/encoded_frame.cc @@ -174,33 +174,16 @@ VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize) webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType) { - switch(frameType) - { + switch(frameType) { case kKeyFrame: - { - return kVideoFrameKey; - } + return kVideoFrameKey; case kDeltaFrame: - { - return kVideoFrameDelta; - } - case kGoldenFrame: - { - return kVideoFrameGolden; - } - case kAltRefFrame: - { - return kVideoFrameAltRef; - } + return kVideoFrameDelta; case kSkipFrame: - { - return kFrameEmpty; - } + return kFrameEmpty; default: - { - return kVideoFrameDelta; - } - } + return kVideoFrameDelta; + } } VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) { @@ -209,10 +192,6 @@ VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) { return kKeyFrame; case kVideoFrameDelta: return kDeltaFrame; - case kVideoFrameGolden: - return kGoldenFrame; - case kVideoFrameAltRef: - return kAltRefFrame; default: assert(false); return kDeltaFrame; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.cc index 55b5f658e76f..e9720d901abc 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.cc @@ -12,8 +12,46 @@ #include "webrtc/modules/video_coding/main/source/encoded_frame.h" #include "webrtc/modules/video_coding/main/source/generic_encoder.h" #include "webrtc/modules/video_coding/main/source/media_optimization.h" +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" namespace webrtc { +namespace { +// Map information from info into rtp. If no relevant information is found +// in info, rtp is set to NULL. 
+void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader** rtp) { + if (!info) { + *rtp = NULL; + return; + } + switch (info->codecType) { + case kVideoCodecVP8: { + (*rtp)->codec = kRtpVideoVp8; + (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8(); + (*rtp)->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId; + (*rtp)->codecHeader.VP8.nonReference = + info->codecSpecific.VP8.nonReference; + (*rtp)->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx; + (*rtp)->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync; + (*rtp)->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx; + (*rtp)->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx; + (*rtp)->simulcastIdx = info->codecSpecific.VP8.simulcastIdx; + return; + } + case kVideoCodecH264: + (*rtp)->codec = kRtpVideoH264; + (*rtp)->simulcastIdx = info->codecSpecific.H264.simulcastIdx; + return; + case kVideoCodecGeneric: + (*rtp)->codec = kRtpVideoGeneric; + (*rtp)->simulcastIdx = info->codecSpecific.generic.simulcast_idx; + return; + default: + // No codec specific info. Change RTP header pointer to NULL. + *rtp = NULL; + return; + } +} +} // namespace //#define DEBUG_ENCODER_BIT_STREAM @@ -49,10 +87,6 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings, _bitRate = settings->startBitrate * 1000; _frameRate = settings->maxFramerate; _codecType = settings->codecType; - if (_VCMencodedFrameCallback != NULL) - { - _VCMencodedFrameCallback->SetCodecType(_codecType); - } return _encoder.InitEncode(settings, numberOfCores, maxPayloadSize); } @@ -126,8 +160,6 @@ int32_t VCMGenericEncoder::RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback) { _VCMencodedFrameCallback = VCMencodedFrameCallback; - - _VCMencodedFrameCallback->SetCodecType(_codecType); _VCMencodedFrameCallback->SetInternalSource(_internalSource); return _encoder.RegisterEncodeCompleteCallback(_VCMencodedFrameCallback); } @@ -141,13 +173,13 @@ VCMGenericEncoder::InternalSource() const /*************************** * Callback Implementation ***************************/ -VCMEncodedFrameCallback::VCMEncodedFrameCallback(): +VCMEncodedFrameCallback::VCMEncodedFrameCallback( + EncodedImageCallback* post_encode_callback): _sendCallback(), _mediaOpt(NULL), -_encodedBytes(0), _payloadType(0), -_codecType(kVideoCodecUnknown), -_internalSource(false) +_internalSource(false), +post_encode_callback_(post_encode_callback) #ifdef DEBUG_ENCODER_BIT_STREAM , _bitStreamAfterEncoder(NULL) #endif @@ -177,6 +209,8 @@ VCMEncodedFrameCallback::Encoded( const CodecSpecificInfo* codecSpecificInfo, const RTPFragmentationHeader* fragmentationHeader) { + post_encode_callback_->Encoded(encodedImage); + FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType); uint32_t encodedBytes = 0; @@ -193,14 +227,7 @@ VCMEncodedFrameCallback::Encoded( RTPVideoHeader rtpVideoHeader; RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader; - if (codecSpecificInfo) - { - CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr); - } - else - { - rtpVideoHeaderPtr = NULL; - } + CopyCodecSpecific(codecSpecificInfo, &rtpVideoHeaderPtr); int32_t callbackReturn = _sendCallback->SendData( frameType, @@ -220,9 +247,8 @@ VCMEncodedFrameCallback::Encoded( { return VCM_UNINITIALIZED; } - _encodedBytes = encodedBytes; if (_mediaOpt != NULL) { - _mediaOpt->UpdateWithEncodedData(_encodedBytes, encodedImage._timeStamp, + _mediaOpt->UpdateWithEncodedData(encodedBytes, encodedImage._timeStamp, frameType); if (_internalSource) { @@ -232,12 
+258,6 @@ VCMEncodedFrameCallback::Encoded( return VCM_OK; } -uint32_t -VCMEncodedFrameCallback::EncodedBytes() -{ - return _encodedBytes; -} - void VCMEncodedFrameCallback::SetMediaOpt( media_optimization::MediaOptimization *mediaOpt) @@ -245,34 +265,4 @@ VCMEncodedFrameCallback::SetMediaOpt( _mediaOpt = mediaOpt; } -void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info, - RTPVideoHeader** rtp) { - switch (info.codecType) { - case kVideoCodecVP8: { - (*rtp)->codec = kRtpVideoVp8; - (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8(); - (*rtp)->codecHeader.VP8.pictureId = info.codecSpecific.VP8.pictureId; - (*rtp)->codecHeader.VP8.nonReference = - info.codecSpecific.VP8.nonReference; - (*rtp)->codecHeader.VP8.temporalIdx = info.codecSpecific.VP8.temporalIdx; - (*rtp)->codecHeader.VP8.layerSync = info.codecSpecific.VP8.layerSync; - (*rtp)->codecHeader.VP8.tl0PicIdx = info.codecSpecific.VP8.tl0PicIdx; - (*rtp)->codecHeader.VP8.keyIdx = info.codecSpecific.VP8.keyIdx; - (*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx; - return; - } - case kVideoCodecH264: - (*rtp)->codec = kRtpVideoH264; - (*rtp)->simulcastIdx = info.codecSpecific.H264.simulcastIdx; - return; - case kVideoCodecGeneric: - (*rtp)->codec = kRtpVideoGeneric; - (*rtp)->simulcastIdx = info.codecSpecific.generic.simulcast_idx; - return; - default: - // No codec specific info. Change RTP header pointer to NULL. - *rtp = NULL; - return; - } -} } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.h index 0c2d287a9dfe..9277260af345 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/generic_encoder.h @@ -15,8 +15,10 @@ #include <stdio.h> -namespace webrtc -{ +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +namespace webrtc { +class CriticalSectionWrapper; namespace media_optimization { class MediaOptimization; @@ -28,7 +30,7 @@ class MediaOptimization; class VCMEncodedFrameCallback : public EncodedImageCallback { public: - VCMEncodedFrameCallback(); + VCMEncodedFrameCallback(EncodedImageCallback* post_encode_callback); virtual ~VCMEncodedFrameCallback(); /* @@ -39,10 +41,6 @@ public: const CodecSpecificInfo* codecSpecificInfo = NULL, const RTPFragmentationHeader* fragmentationHeader = NULL); /* - * Get number of encoded bytes - */ - uint32_t EncodedBytes(); - /* * Callback implementation - generic encoder encode complete */ int32_t SetTransportCallback(VCMPacketizationCallback* transport); @@ -52,23 +50,16 @@ public: void SetMediaOpt (media_optimization::MediaOptimization* mediaOpt); void SetPayloadType(uint8_t payloadType) { _payloadType = payloadType; }; - void SetCodecType(VideoCodecType codecType) {_codecType = codecType;}; void SetInternalSource(bool internalSource) { _internalSource = internalSource; }; private: - /* - * Map information from info into rtp. If no relevant information is found - * in info, rtp is set to NULL. 
- */ - static void CopyCodecSpecific(const CodecSpecificInfo& info, - RTPVideoHeader** rtp); - VCMPacketizationCallback* _sendCallback; media_optimization::MediaOptimization* _mediaOpt; - uint32_t _encodedBytes; uint8_t _payloadType; - VideoCodecType _codecType; bool _internalSource; + + EncodedImageCallback* post_encode_callback_; + #ifdef DEBUG_ENCODER_BIT_STREAM FILE* _bitStreamAfterEncoder; #endif diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.cc index 3343b25ad53f..651f960b1675 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.cc @@ -184,7 +184,6 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock, incomplete_frames_(), last_decoded_state_(), first_packet_since_reset_(true), - receive_statistics_(), incoming_frame_rate_(0), incoming_frame_count_(0), time_last_incoming_frame_count_(0), @@ -209,7 +208,6 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock, average_packets_per_frame_(0.0f), frame_counter_(0) { memset(frame_buffers_, 0, sizeof(frame_buffers_)); - memset(receive_statistics_, 0, sizeof(receive_statistics_)); for (int i = 0; i < kStartNumberOfFrames; i++) { frame_buffers_[i] = new VCMFrameBuffer(); @@ -255,8 +253,7 @@ void VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs) { assert(max_nack_list_size_ == rhs.max_nack_list_size_); assert(max_packet_age_to_nack_ == rhs.max_packet_age_to_nack_); assert(max_incomplete_time_ms_ == rhs.max_incomplete_time_ms_); - memcpy(receive_statistics_, rhs.receive_statistics_, - sizeof(receive_statistics_)); + receive_statistics_ = rhs.receive_statistics_; nack_seq_nums_.resize(rhs.nack_seq_nums_.size()); missing_sequence_numbers_ = rhs.missing_sequence_numbers_; latest_received_sequence_number_ = rhs.latest_received_sequence_number_; @@ -301,7 +298,7 @@ void VCMJitterBuffer::Start() { incoming_bit_count_ = 0; incoming_bit_rate_ = 0; time_last_incoming_frame_count_ = clock_->TimeInMilliseconds(); - memset(receive_statistics_, 0, sizeof(receive_statistics_)); + receive_statistics_.clear(); num_consecutive_old_frames_ = 0; num_consecutive_old_packets_ = 0; @@ -373,13 +370,9 @@ void VCMJitterBuffer::Flush() { } // Get received key and delta frames -void VCMJitterBuffer::FrameStatistics(uint32_t* received_delta_frames, - uint32_t* received_key_frames) const { - assert(received_delta_frames); - assert(received_key_frames); +std::map<FrameType, uint32_t> VCMJitterBuffer::FrameStatistics() const { CriticalSectionScoped cs(crit_sect_); - *received_delta_frames = receive_statistics_[1] + receive_statistics_[3]; - *received_key_frames = receive_statistics_[0] + receive_statistics_[2]; + return receive_statistics_; } int VCMJitterBuffer::num_discarded_packets() const { @@ -1252,26 +1245,7 @@ void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) { // Update receive statistics. We count all layers, thus when you use layers // adding all key and delta frames might differ from frame count. 
if (frame.IsSessionComplete()) { - switch (frame.FrameType()) { - case kVideoFrameKey: { - receive_statistics_[0]++; - break; - } - case kVideoFrameDelta: { - receive_statistics_[1]++; - break; - } - case kVideoFrameGolden: { - receive_statistics_[2]++; - break; - } - case kVideoFrameAltRef: { - receive_statistics_[3]++; - break; - } - default: - assert(false); - } + ++receive_statistics_[frame.FrameType()]; } } diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.h index 63fbb6eb3a48..bd7f07c9e88e 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer.h @@ -98,10 +98,9 @@ class VCMJitterBuffer { // Empty the jitter buffer of all its data. void Flush(); - // Get the number of received key and delta frames since the jitter buffer + // Get the number of received frames, by type, since the jitter buffer // was started. - void FrameStatistics(uint32_t* received_delta_frames, - uint32_t* received_key_frames) const; + std::map<FrameType, uint32_t> FrameStatistics() const; // The number of packets discarded by the jitter buffer because the decoder // won't be able to decode them. @@ -297,8 +296,8 @@ class VCMJitterBuffer { bool first_packet_since_reset_; // Statistics. - // Frame counter for each type (key, delta, golden, key-delta). - unsigned int receive_statistics_[4]; + // Frame counts for each type (key, delta, ...) + std::map<FrameType, uint32_t> receive_statistics_; // Latest calculated frame rates of incoming stream. unsigned int incoming_frame_rate_; unsigned int incoming_frame_count_; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc index f596d6800277..e535a8a4043c 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc @@ -1632,11 +1632,9 @@ TEST_F(TestRunningJitterBuffer, EmptyPackets) { } TEST_F(TestRunningJitterBuffer, StatisticsTest) { - uint32_t num_delta_frames = 0; - uint32_t num_key_frames = 0; - jitter_buffer_->FrameStatistics(&num_delta_frames, &num_key_frames); - EXPECT_EQ(0u, num_delta_frames); - EXPECT_EQ(0u, num_key_frames); + std::map<FrameType, uint32_t> frame_stats(jitter_buffer_->FrameStatistics()); + EXPECT_EQ(0u, frame_stats[kVideoFrameDelta]); + EXPECT_EQ(0u, frame_stats[kVideoFrameKey]); uint32_t framerate = 0; uint32_t bitrate = 0; @@ -1654,9 +1652,9 @@ TEST_F(TestRunningJitterBuffer, StatisticsTest) { // being decoded. EXPECT_TRUE(DecodeCompleteFrame()); EXPECT_TRUE(DecodeCompleteFrame()); - jitter_buffer_->FrameStatistics(&num_delta_frames, &num_key_frames); - EXPECT_EQ(3u, num_delta_frames); - EXPECT_EQ(2u, num_key_frames); + frame_stats = jitter_buffer_->FrameStatistics(); + EXPECT_EQ(3u, frame_stats[kVideoFrameDelta]); + EXPECT_EQ(2u, frame_stats[kVideoFrameKey]); // Insert 20 more frames to get estimates of bitrate and framerate over // 1 second. 
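// With FrameStatistics() returning a map keyed by FrameType instead of
// filling two out-parameters, callers pick out the counts they care about,
// and absent types simply read as zero. A hedged sketch of consuming the
// new shape (the enum below is an illustrative stand-in for webrtc's
// FrameType, not the real definition):
#include <map>
#include <stdint.h>

enum FrameType { kVideoFrameKey, kVideoFrameDelta };  // illustrative stand-in

// Reads a count without mutating the map. Note that std::map::operator[]
// on a non-const map default-inserts a zero entry, which is why the
// unittest above can index frame_stats[kVideoFrameKey] on its local copy.
static uint32_t CountOrZero(const std::map<FrameType, uint32_t>& stats,
                            FrameType type) {
  std::map<FrameType, uint32_t>::const_iterator it = stats.find(type);
  return it == stats.end() ? 0 : it->second;
}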
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.cc index 051395145c69..4884e3ac3a41 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.cc @@ -17,6 +17,61 @@ namespace webrtc { namespace media_optimization { +namespace { +void UpdateProtectionCallback( + VCMProtectionMethod* selected_method, + uint32_t* video_rate_bps, + uint32_t* nack_overhead_rate_bps, + uint32_t* fec_overhead_rate_bps, + VCMProtectionCallback* video_protection_callback) { + FecProtectionParams delta_fec_params; + FecProtectionParams key_fec_params; + // Get the FEC code rate for Key frames (set to 0 when NA). + key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK(); + + // Get the FEC code rate for Delta frames (set to 0 when NA). + delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD(); + + // Get the FEC-UEP protection status for Key frames: UEP on/off. + key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK(); + + // Get the FEC-UEP protection status for Delta frames: UEP on/off. + delta_fec_params.use_uep_protection = + selected_method->RequiredUepProtectionD(); + + // The RTP module currently requires the same |max_fec_frames| for both + // key and delta frames. + delta_fec_params.max_fec_frames = selected_method->MaxFramesFec(); + key_fec_params.max_fec_frames = selected_method->MaxFramesFec(); + + // Set the FEC packet mask type. |kFecMaskBursty| is more effective for + // consecutive losses and little/no packet re-ordering. As we currently + // do not have feedback data on the degree of correlated losses and packet + // re-ordering, we keep default setting to |kFecMaskRandom| for now. + delta_fec_params.fec_mask_type = kFecMaskRandom; + key_fec_params.fec_mask_type = kFecMaskRandom; + + // TODO(Marco): Pass FEC protection values per layer. 
+ video_protection_callback->ProtectionRequest(&delta_fec_params, + &key_fec_params, + video_rate_bps, + nack_overhead_rate_bps, + fec_overhead_rate_bps); +} +} // namespace + +struct MediaOptimization::EncodedFrameSample { + EncodedFrameSample(int size_bytes, + uint32_t timestamp, + int64_t time_complete_ms) + : size_bytes(size_bytes), + timestamp(timestamp), + time_complete_ms(time_complete_ms) {} + + uint32_t size_bytes; + uint32_t timestamp; + int64_t time_complete_ms; +}; MediaOptimization::MediaOptimization(int32_t id, Clock* clock) : id_(id), @@ -35,8 +90,6 @@ MediaOptimization::MediaOptimization(int32_t id, Clock* clock) target_bit_rate_(0), incoming_frame_rate_(0), enable_qm_(false), - video_protection_callback_(NULL), - video_qmsettings_callback_(NULL), encoded_frame_samples_(), avg_sent_bit_rate_bps_(0), avg_sent_framerate_(0), @@ -46,7 +99,11 @@ MediaOptimization::MediaOptimization(int32_t id, Clock* clock) qm_resolution_(new VCMQmResolution()), last_qm_update_time_(0), last_change_time_(0), - num_layers_(0) { + num_layers_(0), + suspension_enabled_(false), + video_suspended_(false), + suspension_threshold_bps_(0), + suspension_window_bps_(0) { memset(send_statistics_, 0, sizeof(send_statistics_)); memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_)); } @@ -55,7 +112,8 @@ MediaOptimization::~MediaOptimization(void) { loss_prot_logic_->Release(); } -int32_t MediaOptimization::Reset() { +void MediaOptimization::Reset() { + SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0, max_payload_size_); memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_)); incoming_frame_rate_ = 0.0; frame_dropper_->Reset(); @@ -77,12 +135,52 @@ int32_t MediaOptimization::Reset() { encoded_frame_samples_.clear(); avg_sent_bit_rate_bps_ = 0; num_layers_ = 1; - return VCM_OK; } -uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate, - uint8_t fraction_lost, - uint32_t round_trip_time_ms) { +void MediaOptimization::SetEncodingData(VideoCodecType send_codec_type, + int32_t max_bit_rate, + uint32_t frame_rate, + uint32_t target_bitrate, + uint16_t width, + uint16_t height, + int num_layers, + int32_t mtu) { + // Everything codec specific should be reset here since this means the codec + // has changed. If native dimension values have changed, then either user + // initiated change, or QM initiated change. Will be able to determine only + // after the processing of the first frame. + last_change_time_ = clock_->TimeInMilliseconds(); + content_->Reset(); + content_->UpdateFrameRate(frame_rate); + + max_bit_rate_ = max_bit_rate; + send_codec_type_ = send_codec_type; + target_bit_rate_ = target_bitrate; + float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f; + loss_prot_logic_->UpdateBitRate(target_bitrate_kbps); + loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate)); + loss_prot_logic_->UpdateFrameSize(width, height); + loss_prot_logic_->UpdateNumLayers(num_layers); + frame_dropper_->Reset(); + frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate)); + user_frame_rate_ = static_cast<float>(frame_rate); + codec_width_ = width; + codec_height_ = height; + num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero. 
+ max_payload_size_ = mtu; + qm_resolution_->Initialize(target_bitrate_kbps, + user_frame_rate_, + codec_width_, + codec_height_, + num_layers_); +} + +uint32_t MediaOptimization::SetTargetRates( + uint32_t target_bitrate, + uint8_t fraction_lost, + uint32_t round_trip_time_ms, + VCMProtectionCallback* protection_callback, + VCMQMSettingsCallback* qmsettings_callback) { WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, id_, @@ -147,10 +245,13 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate, // Get the bit cost of protection method, based on the amount of // overhead data actually transmitted (including headers) the last // second. - UpdateProtectionCallback(selected_method, - &sent_video_rate_bps, - &sent_nack_rate_bps, - &sent_fec_rate_bps); + if (protection_callback) { + UpdateProtectionCallback(selected_method, + &sent_video_rate_bps, + &sent_nack_rate_bps, + &sent_fec_rate_bps, + protection_callback); + } uint32_t sent_total_rate_bps = sent_video_rate_bps + sent_nack_rate_bps + sent_fec_rate_bps; // Estimate the overhead costs of the next second as staying the same @@ -180,7 +281,7 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate, static_cast<float>(target_bit_rate_) / 1000.0f; frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_); - if (enable_qm_) { + if (enable_qm_ && qmsettings_callback) { WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, id_, @@ -198,53 +299,17 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate, // Check for QM selection. bool select_qm = CheckStatusForQMchange(); if (select_qm) { - SelectQuality(); + SelectQuality(qmsettings_callback); } // Reset the short-term averaged content data. content_->ResetShortTermAvgData(); } + CheckSuspendConditions(); + return target_bit_rate_; } -int32_t MediaOptimization::SetEncodingData(VideoCodecType send_codec_type, - int32_t max_bit_rate, - uint32_t frame_rate, - uint32_t target_bitrate, - uint16_t width, - uint16_t height, - int num_layers) { - // Everything codec specific should be reset here since this means the codec - // has changed. If native dimension values have changed, then either user - // initiated change, or QM initiated change. Will be able to determine only - // after the processing of the first frame. - last_change_time_ = clock_->TimeInMilliseconds(); - content_->Reset(); - content_->UpdateFrameRate(frame_rate); - - max_bit_rate_ = max_bit_rate; - send_codec_type_ = send_codec_type; - target_bit_rate_ = target_bitrate; - float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f; - loss_prot_logic_->UpdateBitRate(target_bitrate_kbps); - loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate)); - loss_prot_logic_->UpdateFrameSize(width, height); - loss_prot_logic_->UpdateNumLayers(num_layers); - frame_dropper_->Reset(); - frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate)); - user_frame_rate_ = static_cast<float>(frame_rate); - codec_width_ = width; - codec_height_ = height; - num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero. 
- int32_t ret = VCM_OK; - ret = qm_resolution_->Initialize(target_bitrate_kbps, - user_frame_rate_, - codec_width_, - codec_height_, - num_layers_); - return ret; -} - void MediaOptimization::EnableProtectionMethod(bool enable, VCMProtectionMethodEnum method) { bool updated = false; @@ -258,11 +323,6 @@ void MediaOptimization::EnableProtectionMethod(bool enable, } } -bool MediaOptimization::IsProtectionMethodEnabled( - VCMProtectionMethodEnum method) { - return (loss_prot_logic_->SelectedType() == method); -} - uint32_t MediaOptimization::InputFrameRate() { ProcessIncomingFrameRate(clock_->TimeInMilliseconds()); return uint32_t(incoming_frame_rate_ + 0.5f); @@ -281,6 +341,13 @@ uint32_t MediaOptimization::SentBitRate() { return avg_sent_bit_rate_bps_; } +VCMFrameCount MediaOptimization::SentFrameCount() { + VCMFrameCount count; + count.numDeltaFrames = delta_frame_cnt_; + count.numKeyFrames = key_frame_cnt_; + return count; +} + int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length, uint32_t timestamp, FrameType encoded_frame_type) { @@ -300,8 +367,7 @@ int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length, UpdateSentBitrate(now_ms); UpdateSentFramerate(); if (encoded_length > 0) { - const bool delta_frame = (encoded_frame_type != kVideoFrameKey && - encoded_frame_type != kVideoFrameGolden); + const bool delta_frame = (encoded_frame_type != kVideoFrameKey); frame_dropper_->Fill(encoded_length, delta_frame); if (max_payload_size_ > 0 && encoded_length > 0) { @@ -335,45 +401,22 @@ int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length, return VCM_OK; } -int32_t MediaOptimization::RegisterProtectionCallback( - VCMProtectionCallback* protection_callback) { - video_protection_callback_ = protection_callback; - return VCM_OK; -} - -int32_t MediaOptimization::RegisterVideoQMCallback( - VCMQMSettingsCallback* video_qmsettings) { - WEBRTC_TRACE(webrtc::kTraceDebug, - webrtc::kTraceVideoCoding, - id_, - "RegisterVideoQMCallback: %p", video_qmsettings_callback_); - video_qmsettings_callback_ = video_qmsettings; - // Callback setting controls QM. - if (video_qmsettings_callback_ != NULL) { - enable_qm_ = true; - } else { - enable_qm_ = false; - } - return VCM_OK; -} +void MediaOptimization::EnableQM(bool enable) { enable_qm_ = enable; } void MediaOptimization::EnableFrameDropper(bool enable) { frame_dropper_->Enable(enable); } bool MediaOptimization::DropFrame() { + UpdateIncomingFrameRate(); // Leak appropriate number of bytes. frame_dropper_->Leak((uint32_t)(InputFrameRate() + 0.5f)); - + if (video_suspended_) { + return true; // Drop all frames when muted. + } return frame_dropper_->DropFrame(); } -int32_t MediaOptimization::SentFrameCount(VCMFrameCount* frame_count) const { - frame_count->numDeltaFrames = delta_frame_cnt_; - frame_count->numKeyFrames = key_frame_cnt_; - return VCM_OK; -} - void MediaOptimization::UpdateIncomingFrameRate() { int64_t now = clock_->TimeInMilliseconds(); if (incoming_frame_times_[0] == 0) { @@ -400,7 +443,8 @@ void MediaOptimization::UpdateContentData( } } -int32_t MediaOptimization::SelectQuality() { +int32_t MediaOptimization::SelectQuality( + VCMQMSettingsCallback* video_qmsettings_callback) { // Reset quantities for QM select. qm_resolution_->ResetQM(); @@ -418,7 +462,7 @@ int32_t MediaOptimization::SelectQuality() { } // Check for updates to spatial/temporal modes. - QMUpdate(qm); + QMUpdate(qm, video_qmsettings_callback); // Reset all the rate and related frame counters quantities. 
qm_resolution_->ResetRates(); @@ -432,51 +476,17 @@ int32_t MediaOptimization::SelectQuality() { return VCM_OK; } -// Private methods below this line. - -int MediaOptimization::UpdateProtectionCallback( - VCMProtectionMethod* selected_method, - uint32_t* video_rate_bps, - uint32_t* nack_overhead_rate_bps, - uint32_t* fec_overhead_rate_bps) { - if (!video_protection_callback_) { - return VCM_OK; - } - FecProtectionParams delta_fec_params; - FecProtectionParams key_fec_params; - // Get the FEC code rate for Key frames (set to 0 when NA). - key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK(); - - // Get the FEC code rate for Delta frames (set to 0 when NA). - delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD(); - - // Get the FEC-UEP protection status for Key frames: UEP on/off. - key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK(); - - // Get the FEC-UEP protection status for Delta frames: UEP on/off. - delta_fec_params.use_uep_protection = - selected_method->RequiredUepProtectionD(); - - // The RTP module currently requires the same |max_fec_frames| for both - // key and delta frames. - delta_fec_params.max_fec_frames = selected_method->MaxFramesFec(); - key_fec_params.max_fec_frames = selected_method->MaxFramesFec(); - - // Set the FEC packet mask type. |kFecMaskBursty| is more effective for - // consecutive losses and little/no packet re-ordering. As we currently - // do not have feedback data on the degree of correlated losses and packet - // re-ordering, we keep default setting to |kFecMaskRandom| for now. - delta_fec_params.fec_mask_type = kFecMaskRandom; - key_fec_params.fec_mask_type = kFecMaskRandom; - - // TODO(Marco): Pass FEC protection values per layer. - return video_protection_callback_->ProtectionRequest(&delta_fec_params, - &key_fec_params, - video_rate_bps, - nack_overhead_rate_bps, - fec_overhead_rate_bps); +void MediaOptimization::SuspendBelowMinBitrate(int threshold_bps, + int window_bps) { + assert(threshold_bps > 0 && window_bps >= 0); + suspension_threshold_bps_ = threshold_bps; + suspension_window_bps_ = window_bps; + suspension_enabled_ = true; + video_suspended_ = false; } +bool MediaOptimization::IsVideoSuspended() const { return video_suspended_; } + void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) { while (!encoded_frame_samples_.empty()) { if (now_ms - encoded_frame_samples_.front().time_complete_ms > @@ -524,7 +534,9 @@ void MediaOptimization::UpdateSentFramerate() { } } -bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) { +bool MediaOptimization::QMUpdate( + VCMResolutionScale* qm, + VCMQMSettingsCallback* video_qmsettings_callback) { // Check for no change. if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) { return false; @@ -557,7 +569,7 @@ bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) { // will vary/fluctuate, and since we don't want to change the state of the // VPM frame dropper, unless a temporal action was selected, we use the // quantity |qm->frame_rate| for updating. - video_qmsettings_callback_->SetVideoQMSettings( + video_qmsettings_callback->SetVideoQMSettings( qm->frame_rate, codec_width_, codec_height_); content_->UpdateFrameRate(qm->frame_rate); qm_resolution_->UpdateCodecParameters( @@ -610,5 +622,24 @@ void MediaOptimization::SetCPULoadState(CPULoadState state) { loadstate_ = state; } +void MediaOptimization::CheckSuspendConditions() { + // Check conditions for SuspendBelowMinBitrate. |target_bit_rate_| is in bps. 
+ if (suspension_enabled_) { + if (!video_suspended_) { + // Check if we just went below the threshold. + if (target_bit_rate_ < suspension_threshold_bps_) { + video_suspended_ = true; + } + } else { + // Video is already suspended. Check if we just went over the threshold + // with a margin. + if (target_bit_rate_ > + suspension_threshold_bps_ + suspension_window_bps_) { + video_suspended_ = false; + } + } + } +} + } // namespace media_optimization } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.h index 16bcbe3da593..a8b14fb84d4f 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization.h @@ -29,33 +29,25 @@ class VCMContentMetricsProcessing; namespace media_optimization { -enum { - kBitrateMaxFrameSamples = 60 -}; -enum { - kBitrateAverageWinMs = 1000 -}; - -struct EncodedFrameSample { - EncodedFrameSample(int size_bytes, - uint32_t timestamp, - int64_t time_complete_ms) - : size_bytes(size_bytes), - timestamp(timestamp), - time_complete_ms(time_complete_ms) {} - - uint32_t size_bytes; - uint32_t timestamp; - int64_t time_complete_ms; -}; - +// TODO(andresp): Make thread safe. class MediaOptimization { public: MediaOptimization(int32_t id, Clock* clock); - ~MediaOptimization(void); + ~MediaOptimization(); - // Resets the Media Optimization module. - int32_t Reset(); + // TODO(andresp): Can Reset and SetEncodingData be done at construction time + // only? + void Reset(); + + // Informs media optimization of initial encoding state. + void SetEncodingData(VideoCodecType send_codec_type, + int32_t max_bit_rate, + uint32_t frame_rate, + uint32_t bit_rate, + uint16_t width, + uint16_t height, + int num_temporal_layers, + int32_t mtu); // Sets target rates for the encoder given the channel parameters. // Inputs: target bitrate - the encoder target bitrate in bits/s. @@ -63,33 +55,27 @@ class MediaOptimization { // round_trip_time_ms - round trip time in milliseconds. // min_bit_rate - the bit rate of the end-point with lowest rate. // max_bit_rate - the bit rate of the end-point with highest rate. + // TODO(andresp): Find if the callbacks can be triggered only after releasing + // an internal critical section. uint32_t SetTargetRates(uint32_t target_bitrate, uint8_t fraction_lost, - uint32_t round_trip_time_ms); + uint32_t round_trip_time_ms, + VCMProtectionCallback* protection_callback, + VCMQMSettingsCallback* qmsettings_callback); - // Informs media optimization of initial encoding state. - int32_t SetEncodingData(VideoCodecType send_codec_type, - int32_t max_bit_rate, - uint32_t frame_rate, - uint32_t bit_rate, - uint16_t width, - uint16_t height, - int num_temporal_layers); - - // Enables protection method. void EnableProtectionMethod(bool enable, VCMProtectionMethodEnum method); + void EnableQM(bool enable); + void EnableFrameDropper(bool enable); - // Returns weather or not protection method is enabled. - bool IsProtectionMethodEnabled(VCMProtectionMethodEnum method); + // Lets the sender suspend video when the rate drops below + // |threshold_bps|, and turns back on when the rate goes back up above + // |threshold_bps| + |window_bps|. + void SuspendBelowMinBitrate(int threshold_bps, int window_bps); + bool IsVideoSuspended() const; - // Returns the actual input frame rate. 
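// The suspend/resume pair declared above forms a hysteresis band: suspend
// below |threshold_bps|, resume only once the rate exceeds |threshold_bps| +
// |window_bps|. A standalone sketch of that state machine (illustrative
// names; the real logic lives in CheckSuspendConditions() above):
struct SuspendState {
  bool suspended;
  int threshold_bps;
  int window_bps;

  SuspendState(int threshold, int window)
      : suspended(false), threshold_bps(threshold), window_bps(window) {}

  void Update(int target_bitrate_bps) {
    if (!suspended) {
      // Dropped below the floor: mute video.
      if (target_bitrate_bps < threshold_bps)
        suspended = true;
    } else if (target_bitrate_bps > threshold_bps + window_bps) {
      // Cleared the floor plus margin: unmute.
      suspended = false;
    }
  }
};
// With threshold 50000 and window 10000 (the values used by the new
// media_optimization_unittest.cc), rates in [50000, 60000] leave an
// already-suspended stream suspended, which prevents rapid mute/unmute
// toggling around the threshold.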
- uint32_t InputFrameRate(); + bool DropFrame(); - // Returns the actual sent frame rate. - uint32_t SentFrameRate(); - - // Returns the actual sent bit rate. - uint32_t SentBitRate(); + void UpdateContentData(const VideoContentMetrics* content_metrics); // Informs Media Optimization of encoding output: Length and frame type. int32_t UpdateWithEncodedData(int encoded_length, @@ -99,62 +85,48 @@ // Informs Media Optimization of CPU Load state void SetCPULoadState(CPULoadState state); - // Registers a protection callback to be used to inform the user about the - // protection methods used. - int32_t RegisterProtectionCallback( - VCMProtectionCallback* protection_callback); - - // Registers a quality settings callback to be used to inform VPM/user. - int32_t RegisterVideoQMCallback(VCMQMSettingsCallback* video_qmsettings); - - void EnableFrameDropper(bool enable); - - bool DropFrame(); - - // Returns the number of key/delta frames encoded. - int32_t SentFrameCount(VCMFrameCount* frame_count) const; - - // Updates incoming frame rate value. - void UpdateIncomingFrameRate(); - - // Update content metric data. - void UpdateContentData(const VideoContentMetrics* content_metrics); - - // Computes new Quality Mode. - int32_t SelectQuality(); - - // Accessors and mutators. - int32_t max_bit_rate() const { return max_bit_rate_; } - void set_max_payload_size(int32_t mtu) { max_payload_size_ = mtu; } + uint32_t InputFrameRate(); + uint32_t SentFrameRate(); + uint32_t SentBitRate(); + VCMFrameCount SentFrameCount(); private: - typedef std::list<EncodedFrameSample> FrameSampleList; enum { kFrameCountHistorySize = 90 }; enum { kFrameHistoryWinMs = 2000 }; + enum { + kBitrateAverageWinMs = 1000 + }; - // Updates protection callback with protection settings. - int UpdateProtectionCallback(VCMProtectionMethod* selected_method, - uint32_t* total_video_rate_bps, - uint32_t* nack_overhead_rate_bps, - uint32_t* fec_overhead_rate_bps); + struct EncodedFrameSample; + typedef std::list<EncodedFrameSample> FrameSampleList; + void UpdateIncomingFrameRate(); void PurgeOldFrameSamples(int64_t now_ms); void UpdateSentBitrate(int64_t now_ms); void UpdateSentFramerate(); + // Computes new Quality Mode. + int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback); + // Verifies if QM settings differ from default, i.e. if an update is required. // Computes actual values, as will be sent to the encoder. - bool QMUpdate(VCMResolutionScale* qm); + bool QMUpdate(VCMResolutionScale* qm, + VCMQMSettingsCallback* qmsettings_callback); // Checks if we should make a QM change. Return true if yes, false otherwise. bool CheckStatusForQMchange(); void ProcessIncomingFrameRate(int64_t now); + // Checks conditions for suspending the video. The method compares + // |target_bit_rate_| with the threshold values for suspension, and changes + // the state of |video_suspended_| accordingly. 
+ void CheckSuspendConditions(); + int32_t id_; Clock* clock_; int32_t max_bit_rate_; @@ -168,12 +140,10 @@ class MediaOptimization { uint32_t send_statistics_[4]; uint32_t send_statistics_zero_encode_; int32_t max_payload_size_; - uint32_t target_bit_rate_; + int target_bit_rate_; float incoming_frame_rate_; int64_t incoming_frame_times_[kFrameCountHistorySize]; bool enable_qm_; - VCMProtectionCallback* video_protection_callback_; - VCMQMSettingsCallback* video_qmsettings_callback_; std::list encoded_frame_samples_; uint32_t avg_sent_bit_rate_bps_; uint32_t avg_sent_framerate_; @@ -184,9 +154,12 @@ class MediaOptimization { int64_t last_qm_update_time_; int64_t last_change_time_; // Content/user triggered. int num_layers_; + bool suspension_enabled_; + bool video_suspended_; + int suspension_threshold_bps_; + int suspension_window_bps_; CPULoadState loadstate_; -}; // End of MediaOptimization class declaration. - +}; } // namespace media_optimization } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc new file mode 100644 index 000000000000..d58ada651d5f --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/modules/video_coding/main/source/media_optimization.h" +#include "webrtc/system_wrappers/interface/clock.h" + +namespace webrtc { +namespace media_optimization { + +class TestMediaOptimization : public ::testing::Test { + protected: + enum { + kId = 4711 // Id number for the MediaOptimization class. + }; + enum { + kSampleRate = 90000 // RTP timestamps per second. + }; + + // Note: simulated clock starts at 1 seconds, since parts of webrtc use 0 as + // a special case (e.g. frame rate in media optimization). + TestMediaOptimization() + : clock_(1000), + media_opt_(kId, &clock_), + frame_time_ms_(33), + next_timestamp_(0) {} + + // This method mimics what happens in VideoSender::AddVideoFrame. + void AddFrameAndAdvanceTime(int bitrate_bps, bool expect_frame_drop) { + ASSERT_GE(bitrate_bps, 0); + bool frame_dropped = media_opt_.DropFrame(); + EXPECT_EQ(expect_frame_drop, frame_dropped); + if (!frame_dropped) { + int bytes_per_frame = bitrate_bps * frame_time_ms_ / (8 * 1000); + ASSERT_EQ(VCM_OK, media_opt_.UpdateWithEncodedData( + bytes_per_frame, next_timestamp_, kVideoFrameDelta)); + } + next_timestamp_ += frame_time_ms_ * kSampleRate / 1000; + clock_.AdvanceTimeMilliseconds(frame_time_ms_); + } + + SimulatedClock clock_; + MediaOptimization media_opt_; + int frame_time_ms_; + uint32_t next_timestamp_; +}; + + +TEST_F(TestMediaOptimization, VerifyMuting) { + // Enable video suspension with these limits. + // Suspend the video when the rate is below 50 kbps and resume when it gets + // above 50 + 10 kbps again. 
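One note on the harness before the constants below: AddFrameAndAdvanceTime() above sizes each simulated frame straight from the rate. A quick arithmetic check (sketch, not part of the test):

    // bytes_per_frame = bitrate_bps * frame_time_ms / (8 * 1000).
    // For example, 100000 bps at 33 ms per frame gives
    // 100000 * 33 / 8000 = 412 bytes, i.e. 100 kbps at roughly 30 fps.
    int BytesPerFrame(int bitrate_bps, int frame_time_ms) {
      return bitrate_bps * frame_time_ms / (8 * 1000);
    }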
+  const int kThresholdBps = 50000;
+  const int kWindowBps = 10000;
+  media_opt_.SuspendBelowMinBitrate(kThresholdBps, kWindowBps);
+
+  // The video should not be suspended from the start.
+  EXPECT_FALSE(media_opt_.IsVideoSuspended());
+
+  int target_bitrate_kbps = 100;
+  media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
+                            0,    // Lossrate.
+                            100,  // RTT in ms.
+                            NULL,
+                            NULL);
+  media_opt_.EnableFrameDropper(true);
+  for (int time = 0; time < 2000; time += frame_time_ms_) {
+    ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
+  }
+
+  // Set the target rate below the limit for muting.
+  media_opt_.SetTargetRates(kThresholdBps - 1000,
+                            0,    // Lossrate.
+                            100,  // RTT in ms.
+                            NULL,
+                            NULL);
+  // Expect the muter to engage immediately and stay muted.
+  // Test for 2 seconds.
+  for (int time = 0; time < 2000; time += frame_time_ms_) {
+    EXPECT_TRUE(media_opt_.IsVideoSuspended());
+    ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
+  }
+
+  // Set the target above the limit for muting, but not above the
+  // limit + window.
+  media_opt_.SetTargetRates(kThresholdBps + 1000,
+                            0,    // Lossrate.
+                            100,  // RTT in ms.
+                            NULL,
+                            NULL);
+  // Expect the muter to stay muted.
+  // Test for 2 seconds.
+  for (int time = 0; time < 2000; time += frame_time_ms_) {
+    EXPECT_TRUE(media_opt_.IsVideoSuspended());
+    ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
+  }
+
+  // Set the target above limit + window.
+  media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
+                            0,    // Lossrate.
+                            100,  // RTT in ms.
+                            NULL,
+                            NULL);
+  // Expect the muter to disengage immediately.
+  // Test for 2 seconds.
+  for (int time = 0; time < 2000; time += frame_time_ms_) {
+    EXPECT_FALSE(media_opt_.IsVideoSuspended());
+    ASSERT_NO_FATAL_FAILURE(
+        AddFrameAndAdvanceTime((kThresholdBps + kWindowBps) / 1000, false));
+  }
+}
+
+}  // namespace media_optimization
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/receiver.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/receiver.cc
index e0969ef5a071..ae13ddd4211d 100644
--- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/receiver.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/receiver.cc
@@ -238,8 +238,9 @@ void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
 
 void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
   assert(frame_count);
-  jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
-                                 &frame_count->numKeyFrames);
+  std::map<FrameType, uint32_t> counts(jitter_buffer_.FrameStatistics());
+  frame_count->numDeltaFrames = counts[kVideoFrameDelta];
+  frame_count->numKeyFrames = counts[kVideoFrameKey];
 }
 
 uint32_t VCMReceiver::DiscardedPackets() const {
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.cc
index 97fdac98bc92..98a69e962fd8 100644
--- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.cc
@@ -36,6 +36,7 @@ VCMTiming::VCMTiming(Clock* clock,
       min_playout_delay_ms_(0),
       jitter_delay_ms_(0),
       current_delay_ms_(0),
+      last_decode_ms_(0),
       prev_frame_timestamp_(0) {
   if (master_timing == NULL) {
     master_ = true;
@@ -158,7 +159,7 @@ int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
                  timing_id_), "Codec timer error: %d", time_diff_ms);
     assert(false);
   }
-
+  last_decode_ms_ = time_diff_ms;
   if (master_) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(vcm_id_, timing_id_), @@ -262,4 +263,21 @@ uint32_t VCMTiming::TargetDelayInternal() const { jitter_delay_ms_ + MaxDecodeTimeMs() + render_delay_ms_); } +void VCMTiming::GetTimings(int* decode_ms, + int* max_decode_ms, + int* current_delay_ms, + int* target_delay_ms, + int* jitter_buffer_ms, + int* min_playout_delay_ms, + int* render_delay_ms) const { + CriticalSectionScoped cs(crit_sect_); + *decode_ms = last_decode_ms_; + *max_decode_ms = MaxDecodeTimeMs(); + *current_delay_ms = current_delay_ms_; + *target_delay_ms = TargetDelayInternal(); + *jitter_buffer_ms = jitter_delay_ms_; + *min_playout_delay_ms = min_playout_delay_ms_; + *render_delay_ms = render_delay_ms_; +} + } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.h index 3148a7a3cc8a..eb251b711537 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timing.h @@ -82,6 +82,15 @@ class VCMTiming { // certain amount of processing time. bool EnoughTimeToDecode(uint32_t available_processing_time_ms) const; + // Return current timing information. + void GetTimings(int* decode_ms, + int* max_decode_ms, + int* current_delay_ms, + int* target_delay_ms, + int* jitter_buffer_ms, + int* min_playout_delay_ms, + int* render_delay_ms) const; + enum { kDefaultRenderDelayMs = 10 }; enum { kDelayMaxChangeMsPerS = 100 }; @@ -102,6 +111,7 @@ class VCMTiming { uint32_t min_playout_delay_ms_; uint32_t jitter_delay_ms_; uint32_t current_delay_ms_; + int last_decode_ms_; uint32_t prev_frame_timestamp_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding.gypi b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding.gypi index 0c8644bc52ed..b4f6cb7b1c8f 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding.gypi @@ -18,18 +18,6 @@ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8', ], - 'include_dirs': [ - '../interface', - '../../../interface', - '../../codecs/interface', - '../../../../common_video/interface', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - '../../codecs/interface', - ], - }, 'sources': [ # interfaces '../interface/video_coding.h', diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.cc index 0a211ef8082f..18d6ba104b76 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.cc @@ -45,6 +45,36 @@ VCMProcessTimer::Processed() { } // namespace vcm namespace { +// This wrapper provides a way to modify the callback without the need to expose +// a register method all the way down to the function calling it. 
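From the caller's side, both new registration points accept any EncodedImageCallback, routed through the wrapper implemented just below. A hedged usage sketch (the counter class is hypothetical; only the Register* calls are part of this patch):

    // Hypothetical observer counting images on either side of the codec.
    class ImageCounter : public EncodedImageCallback {
     public:
      ImageCounter() : images_(0) {}
      virtual int32_t Encoded(EncodedImage& encoded_image,
                              const CodecSpecificInfo* codec_specific_info,
                              const RTPFragmentationHeader* fragmentation) {
        ++images_;  // A real observer might record encoded image sizes here.
        return 0;   // The wrapper ignores the return value.
      }
     private:
      int images_;
    };
    //
    // Given an existing VideoCodingModule* vcm:
    //   ImageCounter counter;
    //   vcm->RegisterPostEncodeImageCallback(&counter);  // Sender side.
    //   vcm->RegisterPreDecodeImageCallback(&counter);   // Receiver side.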
+class EncodedImageCallbackWrapper : public EncodedImageCallback {
+ public:
+  EncodedImageCallbackWrapper()
+      : cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_(NULL) {}
+
+  virtual ~EncodedImageCallbackWrapper() {}
+
+  void Register(EncodedImageCallback* callback) {
+    CriticalSectionScoped cs(cs_.get());
+    callback_ = callback;
+  }
+
+  // TODO(andresp): Change to void as return value is ignored.
+  virtual int32_t Encoded(EncodedImage& encoded_image,
+                          const CodecSpecificInfo* codec_specific_info,
+                          const RTPFragmentationHeader* fragmentation) {
+    CriticalSectionScoped cs(cs_.get());
+    if (callback_)
+      return callback_->Encoded(
+          encoded_image, codec_specific_info, fragmentation);
+    return 0;
+  }
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> cs_;
+  EncodedImageCallback* callback_ GUARDED_BY(cs_);
+};
+
 class VideoCodingModuleImpl : public VideoCodingModule {
  public:
   VideoCodingModuleImpl(const int32_t id,
@@ -52,7 +82,7 @@ class VideoCodingModuleImpl : public VideoCodingModule {
                         EventFactory* event_factory,
                         bool owns_event_factory)
       : VideoCodingModule(),
-        sender_(new vcm::VideoSender(id, clock)),
+        sender_(new vcm::VideoSender(id, clock, &post_encode_callback_)),
         receiver_(new vcm::VideoReceiver(id, clock, event_factory)),
         own_event_factory_(owns_event_factory ? event_factory : NULL) {}
 
@@ -194,7 +224,16 @@ class VideoCodingModuleImpl : public VideoCodingModule {
   }
 
   virtual int StopDebugRecording() OVERRIDE {
-    return sender_->StopDebugRecording();
+    sender_->StopDebugRecording();
+    return VCM_OK;
+  }
+
+  virtual void SuspendBelowMinBitrate() {
+    return sender_->SuspendBelowMinBitrate();
+  }
+
+  virtual bool VideoSuspended() const {
+    return sender_->VideoSuspended();
   }
 
   virtual int32_t InitializeReceiver() OVERRIDE {
@@ -225,6 +264,11 @@
     return receiver_->RegisterReceiveStatisticsCallback(receiveStats);
   }
 
+  virtual int32_t RegisterDecoderTimingCallback(
+      VCMDecoderTimingCallback* decoderTiming) OVERRIDE {
+    return receiver_->RegisterDecoderTimingCallback(decoderTiming);
+  }
+
   virtual int32_t RegisterFrameTypeCallback(
       VCMFrameTypeCallback* frameTypeCallback) OVERRIDE {
     return receiver_->RegisterFrameTypeCallback(frameTypeCallback);
@@ -310,7 +354,18 @@
     return receiver_->SetReceiveChannelParameters(rtt);
   }
 
+  virtual void RegisterPreDecodeImageCallback(
+      EncodedImageCallback* observer) OVERRIDE {
+    receiver_->RegisterPreDecodeImageCallback(observer);
+  }
+
+  virtual void RegisterPostEncodeImageCallback(
+      EncodedImageCallback* observer) OVERRIDE {
+    post_encode_callback_.Register(observer);
+  }
+
  private:
+  EncodedImageCallbackWrapper post_encode_callback_;
   scoped_ptr<vcm::VideoSender> sender_;
   scoped_ptr<vcm::VideoReceiver> receiver_;
   scoped_ptr<EventFactory> own_event_factory_;
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.h
index 566207caf863..0bd99b4ba9cc 100644
--- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_impl.h
@@ -27,8 +27,13 @@
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 
 namespace webrtc {
+
+class EncodedFrameObserver;
+
 namespace vcm {
 
+class DebugRecorder;
+
 class VCMProcessTimer {
  public:
   VCMProcessTimer(uint32_t periodMs, Clock* clock)
@@ -49,7 +54,10 @@ class VideoSender {
  public:
   typedef VideoCodingModule::SenderNackMode SenderNackMode;
 
-
VideoSender(const int32_t id, Clock* clock); + VideoSender(const int32_t id, + Clock* clock, + EncodedImageCallback* post_encode_callback); + ~VideoSender(); int32_t InitializeSender(); @@ -65,7 +73,8 @@ class VideoSender { uint8_t payloadType, bool internalSource); - int32_t CodecConfigParameters(uint8_t* buffer, int32_t size); + int32_t CodecConfigParameters(uint8_t* buffer, int32_t size) const; + int32_t SentFrameCount(VCMFrameCount* frameCount); int Bitrate(unsigned int* bitrate) const; int FrameRate(unsigned int* framerate) const; @@ -85,7 +94,6 @@ class VideoSender { int32_t IntraFrameRequest(int stream_index); int32_t EnableFrameDropper(bool enable); - int32_t SentFrameCount(VCMFrameCount* frameCount) const; int SetSenderNackMode(SenderNackMode mode); int SetSenderReferenceSelection(bool enable); @@ -95,7 +103,10 @@ class VideoSender { void SetCPULoadState(CPULoadState state); int StartDebugRecording(const char* file_name_utf8); - int StopDebugRecording(); + void StopDebugRecording(); + + void SuspendBelowMinBitrate(); + bool VideoSuspended() const; int32_t TimeUntilNextProcess(); int32_t Process(); @@ -104,6 +115,8 @@ class VideoSender { int32_t _id; Clock* clock_; + scoped_ptr recorder_; + scoped_ptr process_crit_sect_; CriticalSectionWrapper* _sendCritSect; VCMGenericEncoder* _encoder; @@ -111,10 +124,12 @@ class VideoSender { std::vector _nextFrameTypes; media_optimization::MediaOptimization _mediaOpt; VCMSendStatisticsCallback* _sendStatsCallback; - FILE* _encoderInputFile; VCMCodecDataBase _codecDataBase; bool frame_dropper_enabled_; VCMProcessTimer _sendStatsTimer; + + VCMQMSettingsCallback* qm_settings_callback_; + VCMProtectionCallback* protection_callback_; }; class VideoReceiver { @@ -135,6 +150,8 @@ class VideoReceiver { int32_t RegisterReceiveCallback(VCMReceiveCallback* receiveCallback); int32_t RegisterReceiveStatisticsCallback( VCMReceiveStatisticsCallback* receiveStats); + int32_t RegisterDecoderTimingCallback( + VCMDecoderTimingCallback* decoderTiming); int32_t RegisterFrameTypeCallback(VCMFrameTypeCallback* frameTypeCallback); int32_t RegisterPacketRequestCallback(VCMPacketRequestCallback* callback); int RegisterRenderBufferSizeCallback(VCMRenderBufferSizeCallback* callback); @@ -170,6 +187,8 @@ class VideoReceiver { int32_t TimeUntilNextProcess(); int32_t Process(); + void RegisterPreDecodeImageCallback(EncodedImageCallback* observer); + protected: int32_t Decode(const webrtc::VCMEncodedFrame& frame); int32_t RequestKeyFrame(); @@ -198,6 +217,7 @@ class VideoReceiver { VCMDecodedFrameCallback _dualDecodedFrameCallback; VCMFrameTypeCallback* _frameTypeCallback; VCMReceiveStatisticsCallback* _receiveStatsCallback; + VCMDecoderTimingCallback* _decoderTimingCallback; VCMPacketRequestCallback* _packetRequestCallback; VCMRenderBufferSizeCallback* render_buffer_callback_; VCMGenericDecoder* _decoder; @@ -209,6 +229,7 @@ class VideoReceiver { VCMKeyRequestMode _keyRequestMode; bool _scheduleKeyRequest; size_t max_nack_list_size_; + EncodedImageCallback* pre_decode_image_callback_; VCMCodecDataBase _codecDataBase; VCMProcessTimer _receiveStatsTimer; diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_test.gypi b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_test.gypi index 9dd68af6c2f3..a64e02d02c85 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_test.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_coding_test.gypi @@ -21,13 +21,6 @@ 
         '<(webrtc_root)/test/metrics.gyp:metrics',
         '<(webrtc_root)/common_video/common_video.gyp:common_video',
       ],
-      'include_dirs': [
-        '../../../interface',
-        '../../codecs/vp8/include',
-        '../../../../system_wrappers/interface',
-        '../../../../common_video/interface',
-        '../source',
-      ],
       'sources': [
         # headers
         '../test/codec_database_test.h',
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_receiver.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_receiver.cc
index 7528aabfdfff..bd99311e037f 100644
--- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_receiver.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_receiver.cc
@@ -40,6 +40,7 @@ VideoReceiver::VideoReceiver(const int32_t id,
       _dualDecodedFrameCallback(_dualTiming, clock_),
       _frameTypeCallback(NULL),
       _receiveStatsCallback(NULL),
+      _decoderTimingCallback(NULL),
       _packetRequestCallback(NULL),
       render_buffer_callback_(NULL),
       _decoder(NULL),
@@ -51,6 +52,7 @@ VideoReceiver::VideoReceiver(const int32_t id,
       _keyRequestMode(kKeyOnError),
       _scheduleKeyRequest(false),
       max_nack_list_size_(0),
+      pre_decode_image_callback_(NULL),
       _codecDataBase(id),
       _receiveStatsTimer(1000, clock_),
       _retransmissionTimer(10, clock_),
@@ -85,6 +87,30 @@ int32_t VideoReceiver::Process() {
     _receiveStatsCallback->OnReceiveStatisticsUpdate(bitRate, frameRate);
   }
+  if (_decoderTimingCallback != NULL) {
+    int decode_ms;
+    int max_decode_ms;
+    int current_delay_ms;
+    int target_delay_ms;
+    int jitter_buffer_ms;
+    int min_playout_delay_ms;
+    int render_delay_ms;
+    _timing.GetTimings(&decode_ms,
+                       &max_decode_ms,
+                       &current_delay_ms,
+                       &target_delay_ms,
+                       &jitter_buffer_ms,
+                       &min_playout_delay_ms,
+                       &render_delay_ms);
+    _decoderTimingCallback->OnDecoderTiming(decode_ms,
+                                            max_decode_ms,
+                                            current_delay_ms,
+                                            target_delay_ms,
+                                            jitter_buffer_ms,
+                                            min_playout_delay_ms,
+                                            render_delay_ms);
+  }
+
   // Size of render buffer.
   if (render_buffer_callback_) {
     int buffer_size_ms = _receiver.RenderBufferSizeMs();
@@ -95,8 +121,12 @@
   // Key frame requests
   if (_keyRequestTimer.TimeUntilProcess() == 0) {
     _keyRequestTimer.Processed();
-    CriticalSectionScoped cs(process_crit_sect_.get());
-    if (_scheduleKeyRequest && _frameTypeCallback != NULL) {
+    bool request_key_frame = false;
+    {
+      CriticalSectionScoped cs(process_crit_sect_.get());
+      request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
+    }
+    if (request_key_frame) {
       const int32_t ret = RequestKeyFrame();
       if (ret != VCM_OK && returnValue == VCM_OK) {
         returnValue = ret;
       }
     }
   }
@@ -109,16 +139,24 @@
   // disabled when NACK is off.
   if (_retransmissionTimer.TimeUntilProcess() == 0) {
     _retransmissionTimer.Processed();
-    CriticalSectionScoped cs(process_crit_sect_.get());
-    if (_packetRequestCallback != NULL) {
-      uint16_t length = max_nack_list_size_;
+    bool callback_registered = false;
+    uint16_t length;
+    {
+      CriticalSectionScoped cs(process_crit_sect_.get());
+      length = max_nack_list_size_;
+      callback_registered = _packetRequestCallback != NULL;
+    }
+    if (callback_registered && length > 0) {
       std::vector<uint16_t> nackList(length);
       const int32_t ret = NackList(&nackList[0], &length);
       if (ret != VCM_OK && returnValue == VCM_OK) {
         returnValue = ret;
       }
-      if (length > 0) {
-        _packetRequestCallback->ResendPackets(&nackList[0], length);
+      if (ret == VCM_OK && length > 0) {
+        CriticalSectionScoped cs(process_crit_sect_.get());
+        if (_packetRequestCallback != NULL) {
+          _packetRequestCallback->ResendPackets(&nackList[0], length);
+        }
       }
     }
   }
@@ -255,6 +293,7 @@ int32_t VideoReceiver::InitializeReceiver() {
   _receiverInited = true;
   _frameTypeCallback = NULL;
   _receiveStatsCallback = NULL;
+  _decoderTimingCallback = NULL;
   _packetRequestCallback = NULL;
   _keyRequestMode = kKeyOnError;
   _scheduleKeyRequest = false;
@@ -278,6 +317,13 @@
   return VCM_OK;
 }
 
+int32_t VideoReceiver::RegisterDecoderTimingCallback(
+    VCMDecoderTimingCallback* decoderTiming) {
+  CriticalSectionScoped cs(process_crit_sect_.get());
+  _decoderTimingCallback = decoderTiming;
+  return VCM_OK;
+}
+
 // Register an externally defined decoder/render object.
 // Can be a decoder only or a decoder coupled with a renderer.
 int32_t VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
@@ -367,6 +413,11 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
   _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
                              clock_->TimeInMilliseconds());
 
+  if (pre_decode_image_callback_) {
+    EncodedImage encoded_image(frame->EncodedImage());
+    pre_decode_image_callback_->Encoded(encoded_image);
+  }
+
 #ifdef DEBUG_DECODER_BIT_STREAM
   if (_bitStreamBeforeDecoder != NULL) {
     // Write bit stream to file for debugging purposes
@@ -413,7 +464,7 @@ int32_t VideoReceiver::RequestSliceLossIndication(
 
 int32_t VideoReceiver::RequestKeyFrame() {
   TRACE_EVENT0("webrtc", "RequestKeyFrame");
-  CriticalSectionScoped cs(process_crit_sect_.get());
+  CriticalSectionScoped process_cs(process_crit_sect_.get());
   if (_frameTypeCallback != NULL) {
     const int32_t ret = _frameTypeCallback->RequestKeyFrame();
     if (ret < 0) {
@@ -508,6 +559,7 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
   int32_t ret = _decoder->Decode(frame, clock_->TimeInMilliseconds());
 
   // Check for failed decoding, run frame type request callback if needed.
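The receiver changes that follow all use one locking discipline: snapshot the shared flags inside the critical section, then invoke the user callback with the lock released, so a callback can neither deadlock against Process() nor re-enter the module under lock. Distilled (sketch; the flag and callback names are illustrative):

    bool run_callback;
    {
      CriticalSectionScoped cs(process_crit_sect_.get());
      run_callback = scheduled_request_ && callback_ != NULL;  // Snapshot.
    }
    if (run_callback) {
      callback_->RequestKeyFrame();  // Runs without the module lock held.
    }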
+ bool request_key_frame = false; if (ret < 0) { if (ret == VCM_ERROR_REQUEST_SLI) { return RequestSliceLossIndication( @@ -518,52 +570,59 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) { VCMId(_id), "Failed to decode frame %u, requesting key frame", frame.TimeStamp()); - ret = RequestKeyFrame(); + request_key_frame = true; } } else if (ret == VCM_REQUEST_SLI) { ret = RequestSliceLossIndication( _decodedFrameCallback.LastReceivedPictureID() + 1); } if (!frame.Complete() || frame.MissingFrame()) { - CriticalSectionScoped cs(process_crit_sect_.get()); switch (_keyRequestMode) { case kKeyOnKeyLoss: { if (frame.FrameType() == kVideoFrameKey) { - _scheduleKeyRequest = true; - return VCM_OK; + request_key_frame = true; + ret = VCM_OK; } break; } case kKeyOnLoss: { - _scheduleKeyRequest = true; - return VCM_OK; + request_key_frame = true; + ret = VCM_OK; } default: break; } } + if (request_key_frame) { + CriticalSectionScoped cs(process_crit_sect_.get()); + _scheduleKeyRequest = true; + } TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp()); return ret; } // Reset the decoder state int32_t VideoReceiver::ResetDecoder() { - CriticalSectionScoped cs(_receiveCritSect); - if (_decoder != NULL) { - _receiver.Initialize(); - _timing.Reset(); - { - CriticalSectionScoped cs(process_crit_sect_.get()); - _scheduleKeyRequest = false; + bool reset_key_request = false; + { + CriticalSectionScoped cs(_receiveCritSect); + if (_decoder != NULL) { + _receiver.Initialize(); + _timing.Reset(); + reset_key_request = true; + _decoder->Reset(); + } + if (_dualReceiver.State() != kPassive) { + _dualReceiver.Initialize(); + } + if (_dualDecoder != NULL) { + _codecDataBase.ReleaseDecoder(_dualDecoder); + _dualDecoder = NULL; } - _decoder->Reset(); } - if (_dualReceiver.State() != kPassive) { - _dualReceiver.Initialize(); - } - if (_dualDecoder != NULL) { - _codecDataBase.ReleaseDecoder(_dualDecoder); - _dualDecoder = NULL; + if (reset_key_request) { + CriticalSectionScoped cs(process_crit_sect_.get()); + _scheduleKeyRequest = false; } return VCM_OK; } @@ -681,7 +740,6 @@ int32_t VideoReceiver::NackList(uint16_t* nackList, uint16_t* size) { return VCM_MEMORY; } case kNackKeyFrameRequest: { - CriticalSectionScoped cs(_receiveCritSect); WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_id), @@ -782,5 +840,11 @@ int VideoReceiver::SetMinReceiverDelay(int desired_delay_ms) { return _receiver.SetMinReceiverDelay(desired_delay_ms); } +void VideoReceiver::RegisterPreDecodeImageCallback( + EncodedImageCallback* observer) { + CriticalSectionScoped cs(_receiveCritSect); + pre_decode_image_callback_ = observer; +} + } // namespace vcm } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender.cc index 6463353a31f9..edfb67e30d6b 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender.cc @@ -10,6 +10,8 @@ #include "webrtc/common_types.h" +#include // std::max + #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" #include "webrtc/modules/video_coding/main/source/encoded_frame.h" @@ -19,26 +21,63 @@ namespace webrtc { namespace vcm { -VideoSender::VideoSender(const int32_t id, Clock* clock) +class DebugRecorder { + public: + DebugRecorder() + : 
cs_(CriticalSectionWrapper::CreateCriticalSection()), file_(NULL) {} + + ~DebugRecorder() { Stop(); } + + int Start(const char* file_name_utf8) { + CriticalSectionScoped cs(cs_.get()); + if (file_) + fclose(file_); + file_ = fopen(file_name_utf8, "wb"); + if (!file_) + return VCM_GENERAL_ERROR; + return VCM_OK; + } + + void Stop() { + CriticalSectionScoped cs(cs_.get()); + if (file_) { + fclose(file_); + file_ = NULL; + } + } + + void Add(const I420VideoFrame& frame) { + CriticalSectionScoped cs(cs_.get()); + if (file_) + PrintI420VideoFrame(frame, file_); + } + + private: + scoped_ptr cs_; + FILE* file_ GUARDED_BY(cs_); +}; + +VideoSender::VideoSender(const int32_t id, + Clock* clock, + EncodedImageCallback* post_encode_callback) : _id(id), clock_(clock), + recorder_(new DebugRecorder()), process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()), _sendCritSect(CriticalSectionWrapper::CreateCriticalSection()), _encoder(), - _encodedFrameCallback(), + _encodedFrameCallback(post_encode_callback), _nextFrameTypes(1, kVideoFrameDelta), _mediaOpt(id, clock_), _sendStatsCallback(NULL), - _encoderInputFile(NULL), _codecDataBase(id), frame_dropper_enabled_(true), - _sendStatsTimer(1000, clock_) {} + _sendStatsTimer(1000, clock_), + qm_settings_callback_(NULL), + protection_callback_(NULL) {} VideoSender::~VideoSender() { delete _sendCritSect; - if (_encoderInputFile != NULL) { - fclose(_encoderInputFile); - } } int32_t VideoSender::Process() { @@ -68,8 +107,6 @@ int32_t VideoSender::InitializeSender() { _codecDataBase.ResetSender(); _encoder = NULL; _encodedFrameCallback.SetTransportCallback(NULL); - // setting default bitRate and frameRate to 0 - _mediaOpt.SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0); _mediaOpt.Reset(); // Resetting frame dropper return VCM_OK; } @@ -123,9 +160,8 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec, sendCodec->startBitrate * 1000, sendCodec->width, sendCodec->height, - numLayers); - _mediaOpt.set_max_payload_size(maxPayloadSize); - + numLayers, + maxPayloadSize); return VCM_OK; } @@ -169,7 +205,8 @@ int32_t VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder, } // Get codec config parameters -int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, int32_t size) { +int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, + int32_t size) const { CriticalSectionScoped cs(_sendCritSect); if (_encoder != NULL) { return _encoder->CodecConfigParameters(buffer, size); @@ -177,6 +214,14 @@ int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, int32_t size) { return VCM_UNINITIALIZED; } +// TODO(andresp): Make const once media_opt is thread-safe and this has a +// pointer to it. 
+int32_t VideoSender::SentFrameCount(VCMFrameCount* frameCount) {
+  CriticalSectionScoped cs(_sendCritSect);
+  *frameCount = _mediaOpt.SentFrameCount();
+  return VCM_OK;
+}
+
 // Get encode bitrate
 int VideoSender::Bitrate(unsigned int* bitrate) const {
   CriticalSectionScoped cs(_sendCritSect);
@@ -206,8 +251,11 @@ int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
   int32_t ret = 0;
   {
     CriticalSectionScoped sendCs(_sendCritSect);
-    uint32_t targetRate =
-        _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt);
+    uint32_t targetRate = _mediaOpt.SetTargetRates(target_bitrate,
+                                                   lossRate,
+                                                   rtt,
+                                                   protection_callback_,
+                                                   qm_settings_callback_);
     if (_encoder != NULL) {
       ret = _encoder->SetChannelParameters(lossRate, rtt);
       if (ret < 0) {
@@ -245,17 +293,19 @@ int32_t VideoSender::RegisterSendStatisticsCallback(
 
 // Register a video quality settings callback which will be called when frame
 // rate/dimensions need to be updated for video quality optimization
 int32_t VideoSender::RegisterVideoQMCallback(
-    VCMQMSettingsCallback* videoQMSettings) {
+    VCMQMSettingsCallback* qm_settings_callback) {
   CriticalSectionScoped cs(_sendCritSect);
-  return _mediaOpt.RegisterVideoQMCallback(videoQMSettings);
+  qm_settings_callback_ = qm_settings_callback;
+  _mediaOpt.EnableQM(qm_settings_callback_ != NULL);
+  return VCM_OK;
 }
 
 // Register a video protection callback which will be called to deliver the
 // requested FEC rate and NACK status (on/off).
 int32_t VideoSender::RegisterProtectionCallback(
-    VCMProtectionCallback* protection) {
+    VCMProtectionCallback* protection_callback) {
   CriticalSectionScoped cs(_sendCritSect);
-  _mediaOpt.RegisterProtectionCallback(protection);
+  protection_callback_ = protection_callback;
   return VCM_OK;
 }
 
@@ -312,8 +362,6 @@ int32_t VideoSender::AddVideoFrame(const I420VideoFrame& videoFrame,
   if (_nextFrameTypes[0] == kFrameEmpty) {
     return VCM_OK;
   }
-  _mediaOpt.UpdateIncomingFrameRate();
-
   if (_mediaOpt.DropFrame()) {
     WEBRTC_TRACE(webrtc::kTraceStream,
                  webrtc::kTraceVideoCoding,
@@ -323,11 +371,7 @@
   _mediaOpt.UpdateContentData(contentMetrics);
   int32_t ret = _encoder->Encode(videoFrame, codecSpecificInfo, _nextFrameTypes);
-  if (_encoderInputFile != NULL) {
-    if (PrintI420VideoFrame(videoFrame, _encoderInputFile) < 0) {
-      return -1;
-    }
-  }
+  recorder_->Add(videoFrame);
   if (ret < 0) {
     WEBRTC_TRACE(webrtc::kTraceError,
                  webrtc::kTraceVideoCoding,
@@ -367,11 +411,6 @@ int32_t VideoSender::EnableFrameDropper(bool enable) {
   return VCM_OK;
 }
 
-int32_t VideoSender::SentFrameCount(VCMFrameCount* frameCount) const {
-  CriticalSectionScoped cs(_sendCritSect);
-  return _mediaOpt.SentFrameCount(frameCount);
-}
-
 int VideoSender::SetSenderNackMode(SenderNackMode mode) {
   CriticalSectionScoped cs(_sendCritSect);
@@ -404,20 +443,35 @@ int VideoSender::SetSenderKeyFramePeriod(int periodMs) {
 }
 
 int VideoSender::StartDebugRecording(const char* file_name_utf8) {
-  CriticalSectionScoped cs(_sendCritSect);
-  _encoderInputFile = fopen(file_name_utf8, "wb");
-  if (_encoderInputFile == NULL)
-    return VCM_GENERAL_ERROR;
-  return VCM_OK;
+  return recorder_->Start(file_name_utf8);
 }
 
-int VideoSender::StopDebugRecording() {
+void VideoSender::StopDebugRecording() {
+  recorder_->Stop();
+}
+
+void VideoSender::SuspendBelowMinBitrate() {
   CriticalSectionScoped cs(_sendCritSect);
-  if (_encoderInputFile != NULL) {
-    fclose(_encoderInputFile);
-    _encoderInputFile = NULL;
+  VideoCodec current_send_codec;
+  if (SendCodec(&current_send_codec) !=
0) { + assert(false); // Must set a send codec before SuspendBelowMinBitrate. + return; } - return VCM_OK; + int threshold_bps; + if (current_send_codec.numberOfSimulcastStreams == 0) { + threshold_bps = current_send_codec.minBitrate * 1000; + } else { + threshold_bps = current_send_codec.simulcastStream[0].minBitrate * 1000; + } + // Set the hysteresis window to be at 10% of the threshold, but at least + // 10 kbps. + int window_bps = std::max(threshold_bps / 10, 10000); + _mediaOpt.SuspendBelowMinBitrate(threshold_bps, window_bps); +} + +bool VideoSender::VideoSuspended() const { + CriticalSectionScoped cs(_sendCritSect); + return _mediaOpt.IsVideoSuspended(); } void VideoSender::SetCPULoadState(CPULoadState state) { diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender_unittest.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender_unittest.cc index 9f341b159f95..196eff312378 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/video_sender_unittest.cc @@ -11,15 +11,17 @@ #include #include "testing/gtest/include/gtest/gtest.h" -#include "webrtc/common_video/test/frame_generator.h" +#include "webrtc/common.h" #include "webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h" #include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h" +#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" #include "webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h" #include "webrtc/modules/video_coding/main/interface/video_coding.h" #include "webrtc/modules/video_coding/main/source/video_coding_impl.h" #include "webrtc/modules/video_coding/main/test/test_util.h" #include "webrtc/system_wrappers/interface/clock.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/frame_generator.h" #include "webrtc/test/testsupport/fileutils.h" #include "webrtc/test/testsupport/gtest_disable.h" @@ -31,16 +33,44 @@ using ::testing::Field; using ::testing::NiceMock; using ::testing::Pointee; using ::testing::Return; +using ::testing::FloatEq; using std::vector; using webrtc::test::FrameGenerator; namespace webrtc { namespace vcm { namespace { +enum { + kMaxNumberOfTemporalLayers = 3 +}; + +struct Vp8StreamInfo { + float framerate_fps[kMaxNumberOfTemporalLayers]; + int bitrate_kbps[kMaxNumberOfTemporalLayers]; +}; + +MATCHER_P(MatchesVp8StreamInfo, expected, "") { + bool res = true; + for (int tl = 0; tl < kMaxNumberOfTemporalLayers; ++tl) { + if (abs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) { + *result_listener << " framerate_fps[" << tl + << "] = " << arg.framerate_fps[tl] << " (expected " + << expected.framerate_fps[tl] << ") "; + res = false; + } + if (abs(expected.bitrate_kbps[tl] - arg.bitrate_kbps[tl]) > 10) { + *result_listener << " bitrate_kbps[" << tl + << "] = " << arg.bitrate_kbps[tl] << " (expected " + << expected.bitrate_kbps[tl] << ") "; + res = false; + } + } + return res; +} class EmptyFrameGenerator : public FrameGenerator { public: - virtual I420VideoFrame& NextFrame() OVERRIDE { return frame_; } + I420VideoFrame* NextFrame() OVERRIDE { frame_.ResetSize(); return &frame_; } private: I420VideoFrame frame_; @@ -81,6 +111,15 @@ class PacketizationCallback : public VCMPacketizationCallback { interval_ms(); } + Vp8StreamInfo CalculateVp8StreamInfo() { + Vp8StreamInfo info; + for (int tl = 0; tl < 3; ++tl) { + info.framerate_fps[tl] = 
FramerateFpsWithinTemporalLayer(tl); + info.bitrate_kbps[tl] = BitrateKbpsWithinTemporalLayer(tl); + } + return info; + } + private: struct FrameData { FrameData() {} @@ -134,18 +173,19 @@ class TestVideoSender : public ::testing::Test { TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {} virtual void SetUp() { - sender_.reset(new VideoSender(0, &clock_)); + sender_.reset(new VideoSender(0, &clock_, &post_encode_callback_)); EXPECT_EQ(0, sender_->InitializeSender()); EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_)); } void AddFrame() { assert(generator_.get()); - sender_->AddVideoFrame(generator_->NextFrame(), NULL, NULL); + sender_->AddVideoFrame(*generator_->NextFrame(), NULL, NULL); } SimulatedClock clock_; PacketizationCallback packetization_callback_; + MockEncodedImageCallback post_encode_callback_; scoped_ptr sender_; scoped_ptr generator_; }; @@ -319,6 +359,18 @@ class TestVideoSenderWithVp8 : public TestVideoSender { } } + Vp8StreamInfo SimulateWithFramerate(float framerate) { + const float short_simulation_interval = 5.0; + const float long_simulation_interval = 10.0; + // It appears that this 5 seconds simulation is needed to allow + // bitrate and framerate to stabilize. + InsertFrames(framerate, short_simulation_interval); + packetization_callback_.Reset(); + + InsertFrames(framerate, long_simulation_interval); + return packetization_callback_.CalculateVp8StreamInfo(); + } + protected: VideoCodec codec_; int codec_bitrate_kbps_; @@ -327,51 +379,56 @@ class TestVideoSenderWithVp8 : public TestVideoSender { TEST_F(TestVideoSenderWithVp8, DISABLED_ON_ANDROID(FixedTemporalLayersStrategy)) { - // It appears that this 5 seconds simulation are need to allow - // bitrate and framerate to stabilize. - // TODO(andresp): the framerate calculation should be improved. - double framerate = 30.0; - InsertFrames(framerate, 5.0); - packetization_callback_.Reset(); + const int low_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0]; + const int mid_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1]; + const int high_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2]; + { + Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected)); + } + { + Vp8StreamInfo expected = {{3.75, 7.5, 15.0}, {low_b, mid_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(15.0), MatchesVp8StreamInfo(expected)); + } +} - // Need to simulate for 10 seconds due to VP8 bitrate controller. 
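The per-layer expectations here and below are cumulative: each temporal-layer entry includes all lower layers. A sketch of the arithmetic (the 300 kbps input and the 0.4/0.6/1.0 split are illustrative; the same split appears as the low_b/mid_b/high_b constants in the RealTimeTemporalLayersStrategy test below):

    #include <cstdio>

    // Cumulative bitrate and framerate targets for a 3-layer VP8 stream.
    const float kLayerFraction[3] = {0.4f, 0.6f, 1.0f};  // TL0, TL0-1, TL0-2.

    void PrintLayerTargets(int codec_bitrate_kbps, float input_fps) {
      for (int tl = 0; tl < 3; ++tl) {
        int kbps = static_cast<int>(codec_bitrate_kbps * kLayerFraction[tl]);
        float fps = input_fps / (1 << (2 - tl));  // Rate halves per layer.
        std::printf("TL0-%d: %d kbps @ %.2f fps\n", tl, kbps, fps);
      }
    }
    // PrintLayerTargets(300, 30.0f) ->
    //   TL0-0: 120 kbps @ 7.50 fps, TL0-1: 180 @ 15.00, TL0-2: 300 @ 30.00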
- InsertFrames(framerate, 10.0); - EXPECT_NEAR( - packetization_callback_.FramerateFpsWithinTemporalLayer(2), 30.0, 0.5); - EXPECT_NEAR( - packetization_callback_.FramerateFpsWithinTemporalLayer(1), 15.0, 0.5); - EXPECT_NEAR( - packetization_callback_.FramerateFpsWithinTemporalLayer(0), 7.5, 0.5); - EXPECT_NEAR(packetization_callback_.BitrateKbpsWithinTemporalLayer(2), - codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2], - 10); - EXPECT_NEAR(packetization_callback_.BitrateKbpsWithinTemporalLayer(1), - codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1], - 10); - EXPECT_NEAR(packetization_callback_.BitrateKbpsWithinTemporalLayer(0), - codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0], - 10); +TEST_F(TestVideoSenderWithVp8, + DISABLED_ON_ANDROID(RealTimeTemporalLayersStrategy)) { + Config extra_options; + extra_options.Set( + new RealTimeTemporalLayersFactory()); + VideoCodec codec = MakeVp8VideoCodec(352, 288, 3); + codec.extra_options = &extra_options; + codec.minBitrate = 10; + codec.startBitrate = codec_bitrate_kbps_; + codec.maxBitrate = codec_bitrate_kbps_; + EXPECT_EQ(0, sender_->RegisterSendCodec(&codec, 1, 1200)); - framerate = 15.0; - InsertFrames(framerate, 5.0); - packetization_callback_.Reset(); + const int low_b = codec_bitrate_kbps_ * 0.4; + const int mid_b = codec_bitrate_kbps_ * 0.6; + const int high_b = codec_bitrate_kbps_; - InsertFrames(15.0, 10.0); - EXPECT_NEAR( - packetization_callback_.FramerateFpsWithinTemporalLayer(2), 15.0, 0.5); - EXPECT_NEAR( - packetization_callback_.FramerateFpsWithinTemporalLayer(1), 7.5, 0.5); - EXPECT_NEAR( - packetization_callback_.FramerateFpsWithinTemporalLayer(0), 3.75, 0.5); - EXPECT_NEAR(packetization_callback_.BitrateKbpsWithinTemporalLayer(2), - codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2], - 10); - EXPECT_NEAR(packetization_callback_.BitrateKbpsWithinTemporalLayer(1), - codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1], - 10); - EXPECT_NEAR(packetization_callback_.BitrateKbpsWithinTemporalLayer(0), - codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0], - 10); + { + Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected)); + } + { + Vp8StreamInfo expected = {{5.0, 10.0, 20.0}, {low_b, mid_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(20.0), MatchesVp8StreamInfo(expected)); + } + { + Vp8StreamInfo expected = {{7.5, 15.0, 15.0}, {mid_b, high_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(15.0), MatchesVp8StreamInfo(expected)); + } + { + Vp8StreamInfo expected = {{5.0, 10.0, 10.0}, {mid_b, high_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(10.0), MatchesVp8StreamInfo(expected)); + } + { + // TODO(andresp): Find out why this fails with framerate = 7.5 + Vp8StreamInfo expected = {{7.0, 7.0, 7.0}, {high_b, high_b, high_b}}; + EXPECT_THAT(SimulateWithFramerate(7.0), MatchesVp8StreamInfo(expected)); + } } } // namespace } // namespace vcm diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/test/jitter_estimate_test.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/test/jitter_estimate_test.h index 086ec0c3d151..9318a491210d 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/test/jitter_estimate_test.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/test/jitter_estimate_test.h @@ -11,7 +11,7 @@ #ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_ #define WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_ -#include "typedefs.h" +#include "webrtc/typedefs.h" #include 
"jitter_buffer.h" #include "jitter_estimator.h" #include diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/test/quality_modes_test.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/test/quality_modes_test.cc index 31242774b0f8..0dbf6fdfe85a 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/test/quality_modes_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/test/quality_modes_test.cc @@ -249,7 +249,7 @@ QualityModesTest::Perform(const CmdArgs& args) VideoContentMetrics* contentMetrics = NULL; // setting user frame rate - _vpm->SetMaxFrameRate((uint32_t)(_nativeFrameRate+ 0.5f)); + _vpm->SetMaxFramerate((uint32_t)(_nativeFrameRate+ 0.5f)); // for starters: keeping native values: _vpm->SetTargetResolution(_width, _height, (uint32_t)(_frameRate+ 0.5f)); diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/test/rtp_player.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/test/rtp_player.cc index 6af4389341a9..4c157a71684c 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/test/rtp_player.cc +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/test/rtp_player.cc @@ -203,7 +203,7 @@ class SsrcHandlers { } } - int RegisterSsrc(uint32_t ssrc, LostPackets* lost_packets) { + int RegisterSsrc(uint32_t ssrc, LostPackets* lost_packets, Clock* clock) { if (handlers_.count(ssrc) > 0) { return 0; } @@ -217,6 +217,7 @@ class SsrcHandlers { } RtpRtcp::Configuration configuration; + configuration.clock = clock; configuration.id = 1; configuration.audio = false; handler->rtp_module_.reset(RtpReceiver::CreateVideoReceiver( @@ -326,7 +327,6 @@ class RtpPlayerImpl : public RtpPlayerInterface { float loss_rate, uint32_t rtt_ms, bool reordering) : ssrc_handlers_(payload_sink_factory, payload_types), clock_(clock), - packet_source_(NULL), next_rtp_time_(0), first_packet_(true), first_packet_rtp_time_(0), @@ -435,7 +435,7 @@ class RtpPlayerImpl : public RtpPlayerInterface { return -1; } uint32_t ssrc = header.ssrc; - if (ssrc_handlers_.RegisterSsrc(ssrc, &lost_packets_) < 0) { + if (ssrc_handlers_.RegisterSsrc(ssrc, &lost_packets_, clock_) < 0) { DEBUG_LOG1("Unable to register ssrc: %d", ssrc); return -1; } diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing.h index 094900d8616c..b3e0483d06ae 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing.h @@ -24,281 +24,274 @@ #include "webrtc/modules/video_processing/main/interface/video_processing_defines.h" /** - The module is largely intended to process video streams, except functionality + The module is largely intended to process video streams, except functionality provided by static functions which operate independent of previous frames. It - is recommended, but not required that a unique instance be used for each + is recommended, but not required that a unique instance be used for each concurrently processed stream. Similarly, it is recommended to call Reset() before switching to a new stream, but this is not absolutely required. - + The module provides basic thread safety by permitting only a single function to execute concurrently. */ namespace webrtc { -class VideoProcessingModule : public Module -{ -public: - /** - Structure to hold frame statistics. Populate it with GetFrameStats(). 
- */ - struct FrameStats - { - FrameStats() : - mean(0), - sum(0), - numPixels(0), - subSamplWidth(0), - subSamplHeight(0) - { - memset(hist, 0, sizeof(hist)); - } - - uint32_t hist[256]; /**< Histogram of frame */ - uint32_t mean; /**< Mean value of frame */ - uint32_t sum; /**< Sum of frame */ - uint32_t numPixels; /**< Number of pixels */ - uint8_t subSamplWidth; /**< Subsampling rate of width in powers - of 2 */ - uint8_t subSamplHeight; /**< Subsampling rate of height in powers - of 2 */ - }; - - /** - Specifies the warning types returned by BrightnessDetection(). - */ - enum BrightnessWarning - { - kNoWarning, /**< Frame has acceptable brightness */ - kDarkWarning, /**< Frame is too dark */ - kBrightWarning /**< Frame is too bright */ - }; - - /* - Creates a VPM object. - - \param[in] id - Unique identifier of this object. - - \return Pointer to a VPM object. - */ - static VideoProcessingModule* Create(int32_t id); - - /** - Destroys a VPM object. - - \param[in] module - Pointer to the VPM object to destroy. - */ - static void Destroy(VideoProcessingModule* module); - - /** - Not supported. - */ - virtual int32_t TimeUntilNextProcess() { return -1; } - - /** - Not supported. - */ - virtual int32_t Process() { return -1; } - - /** - Resets all processing components to their initial states. This should be - called whenever a new video stream is started. - */ - virtual void Reset() = 0; - - /** - Retrieves statistics for the input frame. This function must be used to - prepare a FrameStats struct for use in certain VPM functions. - - \param[out] stats - The frame statistics will be stored here on return. - - \param[in] frame - Reference to the video frame. - - \return 0 on success, -1 on failure. - */ - static int32_t GetFrameStats(FrameStats* stats, - const I420VideoFrame& frame); - - /** - Checks the validity of a FrameStats struct. Currently, valid implies only - that is had changed from its initialized state. - - \param[in] stats - Frame statistics. - - \return True on valid stats, false on invalid stats. - */ - static bool ValidFrameStats(const FrameStats& stats); - - /** - Returns a FrameStats struct to its intialized state. - - \param[in,out] stats - Frame statistics. - */ - static void ClearFrameStats(FrameStats* stats); - - /** - Enhances the color of an image through a constant mapping. Only the - chrominance is altered. Has a fixed-point implementation. - - \param[in,out] frame - Pointer to the video frame. - */ - static int32_t ColorEnhancement(I420VideoFrame* frame); - - /** - Increases/decreases the luminance value. - - \param[in,out] frame - Pointer to the video frame. - - \param[in] delta - The amount to change the chrominance value of every single pixel. - Can be < 0 also. - - \return 0 on success, -1 on failure. - */ - static int32_t Brighten(I420VideoFrame* frame, int delta); - - /** - Detects and removes camera flicker from a video stream. Every frame from - the stream must be passed in. A frame will only be altered if flicker has - been detected. Has a fixed-point implementation. - - \param[in,out] frame - Pointer to the video frame. - - \param[in,out] stats - Frame statistics provided by GetFrameStats(). On return the stats will - be reset to zero if the frame was altered. Call GetFrameStats() again - if the statistics for the altered frame are required. - - \return 0 on success, -1 on failure. - */ - virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats) = 0; - - /** - Denoises a video frame. Every frame from the stream should be passed in. 
- Has a fixed-point implementation. - - \param[in,out] frame - Pointer to the video frame. - - \return The number of modified pixels on success, -1 on failure. - */ - virtual int32_t Denoising(I420VideoFrame* frame) = 0; - - /** - Detects if a video frame is excessively bright or dark. Returns a - warning if this is the case. Multiple frames should be passed in before - expecting a warning. Has a floating-point implementation. - - \param[in] frame - Pointer to the video frame. - - \param[in] stats - Frame statistics provided by GetFrameStats(). - - \return A member of BrightnessWarning on success, -1 on error - */ - virtual int32_t BrightnessDetection(const I420VideoFrame& frame, - const FrameStats& stats) = 0; - - /** - The following functions refer to the pre-processor unit within VPM. The - pre-processor perfoms spatial/temporal decimation and content analysis on - the frames prior to encoding. - */ - - /** - Enable/disable temporal decimation - - \param[in] enable when true, temporal decimation is enabled - */ - virtual void EnableTemporalDecimation(bool enable) = 0; - - /** - Set target resolution - - \param[in] width - Target width - - \param[in] height - Target height - - \param[in] frameRate - Target frameRate - - \return VPM_OK on success, a negative value on error (see error codes) - - */ - virtual int32_t SetTargetResolution(uint32_t width, - uint32_t height, - uint32_t frameRate) = 0; - - /** - Set max frame rate - \param[in] maxFrameRate: maximum frame rate (limited to native frame rate) - - \return VPM_OK on success, a negative value on error (see error codes) - */ - virtual int32_t SetMaxFrameRate(uint32_t maxFrameRate) = 0; - - /** - Get decimated(target) frame rate - */ - virtual uint32_t DecimatedFrameRate() = 0; - - /** - Get decimated(target) frame width - */ - virtual uint32_t DecimatedWidth() const = 0; - - /** - Get decimated(target) frame height - */ - virtual uint32_t DecimatedHeight() const = 0 ; - - /** - Set the spatial resampling settings of the VPM: The resampler may either be - disabled or one of the following: - scaling to a close to target dimension followed by crop/pad - - \param[in] resamplingMode - Set resampling mode (a member of VideoFrameResampling) - */ - virtual void SetInputFrameResampleMode(VideoFrameResampling - resamplingMode) = 0; - - /** - Get Processed (decimated) frame - - \param[in] frame pointer to the video frame. - \param[in] processedFrame pointer (double) to the processed frame. If no - processing is required, processedFrame will be NULL. - - \return VPM_OK on success, a negative value on error (see error codes) - */ - virtual int32_t PreprocessFrame(const I420VideoFrame& frame, - I420VideoFrame** processedFrame) = 0; - - /** - Return content metrics for the last processed frame - */ - virtual VideoContentMetrics* ContentMetrics() const = 0 ; - - /** - Enable content analysis - */ - virtual void EnableContentAnalysis(bool enable) = 0; +class VideoProcessingModule : public Module { + public: + /** + Structure to hold frame statistics. Populate it with GetFrameStats(). + */ + struct FrameStats { + FrameStats() : + mean(0), + sum(0), + num_pixels(0), + subSamplWidth(0), + subSamplHeight(0) { + memset(hist, 0, sizeof(hist)); + } + uint32_t hist[256]; // FRame histogram. + uint32_t mean; // Frame Mean value. + uint32_t sum; // Sum of frame. + uint32_t num_pixels; // Number of pixels. + uint8_t subSamplWidth; // Subsampling rate of width in powers of 2. + uint8_t subSamplHeight; // Subsampling rate of height in powers of 2. 
}; -} // namespace + /** + Specifies the warning types returned by BrightnessDetection(). + */ + enum BrightnessWarning { + kNoWarning, // Frame has acceptable brightness. + kDarkWarning, // Frame is too dark. + kBrightWarning // Frame is too bright. + }; -#endif + /* + Creates a VPM object. + + \param[in] id + Unique identifier of this object. + + \return Pointer to a VPM object. + */ + static VideoProcessingModule* Create(int32_t id); + + /** + Destroys a VPM object. + + \param[in] module + Pointer to the VPM object to destroy. + */ + static void Destroy(VideoProcessingModule* module); + + /** + Not supported. + */ + virtual int32_t TimeUntilNextProcess() { return -1; } + + /** + Not supported. + */ + virtual int32_t Process() { return -1; } + + /** + Resets all processing components to their initial states. This should be + called whenever a new video stream is started. + */ + virtual void Reset() = 0; + + /** + Retrieves statistics for the input frame. This function must be used to + prepare a FrameStats struct for use in certain VPM functions. + + \param[out] stats + The frame statistics will be stored here on return. + + \param[in] frame + Reference to the video frame. + + \return 0 on success, -1 on failure. + */ + static int32_t GetFrameStats(FrameStats* stats, + const I420VideoFrame& frame); + + /** + Checks the validity of a FrameStats struct. Currently, valid implies only + that is had changed from its initialized state. + + \param[in] stats + Frame statistics. + + \return True on valid stats, false on invalid stats. + */ + static bool ValidFrameStats(const FrameStats& stats); + + /** + Returns a FrameStats struct to its intialized state. + + \param[in,out] stats + Frame statistics. + */ + static void ClearFrameStats(FrameStats* stats); + + /** + Enhances the color of an image through a constant mapping. Only the + chrominance is altered. Has a fixed-point implementation. + + \param[in,out] frame + Pointer to the video frame. + */ + static int32_t ColorEnhancement(I420VideoFrame* frame); + + /** + Increases/decreases the luminance value. + + \param[in,out] frame + Pointer to the video frame. + + \param[in] delta + The amount to change the chrominance value of every single pixel. + Can be < 0 also. + + \return 0 on success, -1 on failure. + */ + static int32_t Brighten(I420VideoFrame* frame, int delta); + + /** + Detects and removes camera flicker from a video stream. Every frame from + the stream must be passed in. A frame will only be altered if flicker has + been detected. Has a fixed-point implementation. + + \param[in,out] frame + Pointer to the video frame. + + \param[in,out] stats + Frame statistics provided by GetFrameStats(). On return the stats will + be reset to zero if the frame was altered. Call GetFrameStats() again + if the statistics for the altered frame are required. + + \return 0 on success, -1 on failure. + */ + virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats) = 0; + + /** + Denoises a video frame. Every frame from the stream should be passed in. + Has a fixed-point implementation. + + \param[in,out] frame + Pointer to the video frame. + + \return The number of modified pixels on success, -1 on failure. + */ + virtual int32_t Denoising(I420VideoFrame* frame) = 0; + + /** + Detects if a video frame is excessively bright or dark. Returns a + warning if this is the case. Multiple frames should be passed in before + expecting a warning. Has a floating-point implementation. + + \param[in] frame + Pointer to the video frame. 
+ + \param[in] stats + Frame statistics provided by GetFrameStats(). + + \return A member of BrightnessWarning on success, -1 on error + */ + virtual int32_t BrightnessDetection(const I420VideoFrame& frame, + const FrameStats& stats) = 0; + + /** + The following functions refer to the pre-processor unit within VPM. The + pre-processor perfoms spatial/temporal decimation and content analysis on + the frames prior to encoding. + */ + + /** + Enable/disable temporal decimation + + \param[in] enable when true, temporal decimation is enabled + */ + virtual void EnableTemporalDecimation(bool enable) = 0; + + /** + Set target resolution + + \param[in] width + Target width + + \param[in] height + Target height + + \param[in] frame_rate + Target frame_rate + + \return VPM_OK on success, a negative value on error (see error codes) + + */ + virtual int32_t SetTargetResolution(uint32_t width, + uint32_t height, + uint32_t frame_rate) = 0; + + /** + Set max frame rate + \param[in] max_frame_rate: maximum frame rate (limited to native frame rate) + + \return VPM_OK on success, a negative value on error (see error codes) + */ + virtual int32_t SetMaxFramerate(uint32_t max_frame_rate) = 0; + + /** + Get decimated(target) frame rate + */ + virtual uint32_t Decimatedframe_rate() = 0; + + /** + Get decimated(target) frame width + */ + virtual uint32_t DecimatedWidth() const = 0; + + /** + Get decimated(target) frame height + */ + virtual uint32_t DecimatedHeight() const = 0 ; + + /** + Set the spatial resampling settings of the VPM: The resampler may either be + disabled or one of the following: + scaling to a close to target dimension followed by crop/pad + + \param[in] resampling_mode + Set resampling mode (a member of VideoFrameResampling) + */ + virtual void SetInputFrameResampleMode(VideoFrameResampling + resampling_mode) = 0; + + /** + Get Processed (decimated) frame + + \param[in] frame pointer to the video frame. + \param[in] processed_frame pointer (double) to the processed frame. If no + processing is required, processed_frame will be NULL. + + \return VPM_OK on success, a negative value on error (see error codes) + */ + virtual int32_t PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processed_frame) = 0; + + /** + Return content metrics for the last processed frame + */ + virtual VideoContentMetrics* ContentMetrics() const = 0 ; + + /** + Enable content analysis + */ + virtual void EnableContentAnalysis(bool enable) = 0; +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing_defines.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing_defines.h index 7a3536c72c58..93a065896658 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing_defines.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/interface/video_processing_defines.h @@ -29,15 +29,13 @@ namespace webrtc { #define VPM_UNINITIALIZED -5 #define VPM_UNIMPLEMENTED -6 -enum VideoFrameResampling -{ - // TODO: Do we still need crop/pad? - kNoRescaling, // disables rescaling - kFastRescaling, // point - kBiLinear, // bi-linear interpolation - kBox, // Box inteprolation +enum VideoFrameResampling { + kNoRescaling, // Disables rescaling. + kFastRescaling, // Point filter. + kBiLinear, // Bi-linear interpolation. + kBox, // Box inteprolation. 
 };
-} // namespace
+} // namespace webrtc
-#endif
+#endif // WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brighten.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brighten.cc
index 8fa23c275996..ffabbf7fff78 100644
--- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brighten.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brighten.cc
@@ -31,19 +31,19 @@ int32_t Brighten(I420VideoFrame* frame, int delta) {
     return VPM_PARAMETER_ERROR;
   }
-  int numPixels = frame->width() * frame->height();
+  int num_pixels = frame->width() * frame->height();
-  int lookUp[256];
+  int look_up[256];
   for (int i = 0; i < 256; i++) {
     int val = i + delta;
-    lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
+    look_up[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
   }
-  uint8_t* tempPtr = frame->buffer(kYPlane);
+  uint8_t* temp_ptr = frame->buffer(kYPlane);
-  for (int i = 0; i < numPixels; i++) {
-    *tempPtr = static_cast<uint8_t>(lookUp[*tempPtr]);
-    tempPtr++;
+  for (int i = 0; i < num_pixels; i++) {
+    *temp_ptr = static_cast<uint8_t>(look_up[*temp_ptr]);
+    temp_ptr++;
   }
   return VPM_OK;
 }
diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.cc
index 1e7e939dbfd5..8817bac434b2 100644
--- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.cc
@@ -17,180 +17,128 @@ namespace webrtc {
 VPMBrightnessDetection::VPMBrightnessDetection() :
-    _id(0)
-{
-    Reset();
+    id_(0) {
+  Reset();
 }
-VPMBrightnessDetection::~VPMBrightnessDetection()
-{
+VPMBrightnessDetection::~VPMBrightnessDetection() {}
+
+int32_t VPMBrightnessDetection::ChangeUniqueId(const int32_t id) {
+  id_ = id;
+  return VPM_OK;
 }
-int32_t
-VPMBrightnessDetection::ChangeUniqueId(const int32_t id)
-{
-    _id = id;
-    return VPM_OK;
+void VPMBrightnessDetection::Reset() {
+  frame_cnt_bright_ = 0;
+  frame_cnt_dark_ = 0;
 }
-void
-VPMBrightnessDetection::Reset()
-{
-    _frameCntBright = 0;
-    _frameCntDark = 0;
-}
+int32_t VPMBrightnessDetection::ProcessFrame(
+    const I420VideoFrame& frame,
+    const VideoProcessingModule::FrameStats& stats) {
+  if (frame.IsZeroSize()) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
+                 "Null frame pointer");
+    return VPM_PARAMETER_ERROR;
+  }
+  int width = frame.width();
+  int height = frame.height();
-int32_t
-VPMBrightnessDetection::ProcessFrame(const I420VideoFrame& frame,
-                                     const VideoProcessingModule::FrameStats&
-                                     stats)
-{
-    if (frame.IsZeroSize())
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
-                     "Null frame pointer");
-        return VPM_PARAMETER_ERROR;
-    }
-    int width = frame.width();
-    int height = frame.height();
+  if (!VideoProcessingModule::ValidFrameStats(stats)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
+                 "Invalid frame stats");
+    return VPM_PARAMETER_ERROR;
+  }
-    if (!VideoProcessingModule::ValidFrameStats(stats))
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
-                     "Invalid frame stats");
-        return VPM_PARAMETER_ERROR;
-    }
+  const uint8_t frame_cnt_alarm = 2;
-    const uint8_t frameCntAlarm = 2;
+  // Get proportion in lowest bins.
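The detection logic that follows works entirely from the luma histogram carried in FrameStats: the fraction of sampled pixels landing in the lowest bins (below 20) and in the highest bins (230 and up) is what gates the dark/bright checks. A standalone sketch of that proportion computation, with hypothetical names:

#include <stdint.h>

// Fraction of sampled pixels whose luma falls in [lo, hi) of a 256-bin
// histogram, as used for prop_low (bins [0, 20)) and prop_high
// (bins [230, 256)) in the code below.
float HistogramProportion(const uint32_t hist[256], uint32_t num_pixels,
                          int lo, int hi) {
  uint32_t count = 0;
  for (int i = lo; i < hi; i++)
    count += hist[i];
  return num_pixels > 0 ? (float)count / (float)num_pixels : 0.0f;
}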
+ uint8_t low_th = 20; + float prop_low = 0; + for (uint32_t i = 0; i < low_th; i++) { + prop_low += stats.hist[i]; + } +prop_low /= stats.num_pixels; - // Get proportion in lowest bins - uint8_t lowTh = 20; - float propLow = 0; - for (uint32_t i = 0; i < lowTh; i++) - { - propLow += stats.hist[i]; - } - propLow /= stats.numPixels; + // Get proportion in highest bins. + unsigned char high_th = 230; + float prop_high = 0; + for (uint32_t i = high_th; i < 256; i++) { + prop_high += stats.hist[i]; + } + prop_high /= stats.num_pixels; - // Get proportion in highest bins - unsigned char highTh = 230; - float propHigh = 0; - for (uint32_t i = highTh; i < 256; i++) - { - propHigh += stats.hist[i]; - } - propHigh /= stats.numPixels; + if (prop_high < 0.4) { + if (stats.mean < 90 || stats.mean > 170) { + // Standard deviation of Y + const uint8_t* buffer = frame.buffer(kYPlane); + float std_y = 0; + for (int h = 0; h < height; h += (1 << stats.subSamplHeight)) { + int row = h*width; + for (int w = 0; w < width; w += (1 << stats.subSamplWidth)) { + std_y += (buffer[w + row] - stats.mean) * (buffer[w + row] - + stats.mean); + } + } + std_y = sqrt(std_y / stats.num_pixels); - if(propHigh < 0.4) - { - if (stats.mean < 90 || stats.mean > 170) - { - // Standard deviation of Y - const uint8_t* buffer = frame.buffer(kYPlane); - float stdY = 0; - for (int h = 0; h < height; h += (1 << stats.subSamplHeight)) - { - int row = h*width; - for (int w = 0; w < width; w += (1 << stats.subSamplWidth)) - { - stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] - - stats.mean); - } - } - stdY = sqrt(stdY / stats.numPixels); - - // Get percentiles - uint32_t sum = 0; - uint32_t medianY = 140; - uint32_t perc05 = 0; - uint32_t perc95 = 255; - float posPerc05 = stats.numPixels * 0.05f; - float posMedian = stats.numPixels * 0.5f; - float posPerc95 = stats.numPixels * 0.95f; - for (uint32_t i = 0; i < 256; i++) - { - sum += stats.hist[i]; - - if (sum < posPerc05) - { - perc05 = i; // 5th perc - } - if (sum < posMedian) - { - medianY = i; // 50th perc - } - if (sum < posPerc95) - { - perc95 = i; // 95th perc - } - else - { - break; - } - } - - // Check if image is too dark - if ((stdY < 55) && (perc05 < 50)) - { - if (medianY < 60 || stats.mean < 80 || perc95 < 130 || - propLow > 0.20) - { - _frameCntDark++; - } - else - { - _frameCntDark = 0; - } - } - else - { - _frameCntDark = 0; - } - - // Check if image is too bright - if ((stdY < 52) && (perc95 > 200) && (medianY > 160)) - { - if (medianY > 185 || stats.mean > 185 || perc05 > 140 || - propHigh > 0.25) - { - _frameCntBright++; - } - else - { - _frameCntBright = 0; - } - } - else - { - _frameCntBright = 0; - } - - } + // Get percentiles. + uint32_t sum = 0; + uint32_t median_y = 140; + uint32_t perc05 = 0; + uint32_t perc95 = 255; + float pos_perc05 = stats.num_pixels * 0.05f; + float pos_median = stats.num_pixels * 0.5f; + float posPerc95 = stats.num_pixels * 0.95f; + for (uint32_t i = 0; i < 256; i++) { + sum += stats.hist[i]; + if (sum < pos_perc05) perc05 = i; // 5th perc. + if (sum < pos_median) median_y = i; // 50th perc. + if (sum < posPerc95) + perc95 = i; // 95th perc. 
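The cumulative sweep above extracts the 5th, 50th, and 95th percentiles from the 256-bin luma histogram in a single pass. The same idea for an arbitrary percentile, as a hypothetical helper (this variant returns the first bin at which the cumulative count reaches the target, a slightly different convention from the loop above):

#include <stdint.h>

// Smallest luma value v such that at least `fraction` of the pixels
// counted in `hist` are <= v. One pass over the 256 bins.
uint8_t HistogramPercentile(const uint32_t hist[256], uint32_t num_pixels,
                            float fraction) {
  const float target = num_pixels * fraction;
  uint32_t sum = 0;
  for (int i = 0; i < 256; i++) {
    sum += hist[i];
    if (sum >= target)
      return (uint8_t)i;
  }
  return 255;
}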
else - { - _frameCntDark = 0; - _frameCntBright = 0; + break; + } + + // Check if image is too dark + if ((std_y < 55) && (perc05 < 50)) { + if (median_y < 60 || stats.mean < 80 || perc95 < 130 || + prop_low > 0.20) { + frame_cnt_dark_++; + } else { + frame_cnt_dark_ = 0; + } + } else { + frame_cnt_dark_ = 0; } - } - else - { - _frameCntBright++; - _frameCntDark = 0; - } - - if (_frameCntDark > frameCntAlarm) - { - return VideoProcessingModule::kDarkWarning; - } - else if (_frameCntBright > frameCntAlarm) - { - return VideoProcessingModule::kBrightWarning; - } - else - { - return VideoProcessingModule::kNoWarning; + // Check if image is too bright + if ((std_y < 52) && (perc95 > 200) && (median_y > 160)) { + if (median_y > 185 || stats.mean > 185 || perc05 > 140 || + prop_high > 0.25) { + frame_cnt_bright_++; + } else { + frame_cnt_bright_ = 0; + } + } else { + frame_cnt_bright_ = 0; + } + } else { + frame_cnt_dark_ = 0; + frame_cnt_bright_ = 0; } + } else { + frame_cnt_bright_++; + frame_cnt_dark_ = 0; + } + + if (frame_cnt_dark_ > frame_cnt_alarm) { + return VideoProcessingModule::kDarkWarning; + } else if (frame_cnt_bright_ > frame_cnt_alarm) { + return VideoProcessingModule::kBrightWarning; + } else { + return VideoProcessingModule::kNoWarning; + } } -} // namespace +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.h index 22248d60c3f0..dcb366b8bfb0 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/brightness_detection.h @@ -11,34 +11,30 @@ /* * brightness_detection.h */ -#ifndef VPM_BRIGHTNESS_DETECTION_H -#define VPM_BRIGHTNESS_DETECTION_H - +#ifndef MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTNESS_DETECTION_H +#define MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTNESS_DETECTION_H #include "webrtc/modules/video_processing/main/interface/video_processing.h" #include "webrtc/typedefs.h" namespace webrtc { -class VPMBrightnessDetection -{ -public: - VPMBrightnessDetection(); - ~VPMBrightnessDetection(); +class VPMBrightnessDetection { + public: + VPMBrightnessDetection(); + ~VPMBrightnessDetection(); + int32_t ChangeUniqueId(int32_t id); - int32_t ChangeUniqueId(int32_t id); + void Reset(); + int32_t ProcessFrame(const I420VideoFrame& frame, + const VideoProcessingModule::FrameStats& stats); - void Reset(); + private: + int32_t id_; - int32_t ProcessFrame(const I420VideoFrame& frame, - const VideoProcessingModule::FrameStats& stats); - -private: - int32_t _id; - - uint32_t _frameCntBright; - uint32_t _frameCntDark; + uint32_t frame_cnt_bright_; + uint32_t frame_cnt_dark_; }; -} // namespace +} // namespace webrtc -#endif // VPM_BRIGHTNESS_DETECTION_H +#endif // MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTNESS_DETECTION_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.cc index 87a594b612ae..eeec01659c0f 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.cc @@ -15,51 +15,42 @@ #include "webrtc/system_wrappers/interface/trace.h" namespace webrtc { +namespace VideoProcessing { -namespace VideoProcessing -{ - int32_t - ColorEnhancement(I420VideoFrame* frame) - { - 
assert(frame); - // pointers to U and V color pixels - uint8_t* ptrU; - uint8_t* ptrV; - uint8_t tempChroma; +int32_t ColorEnhancement(I420VideoFrame* frame) { +assert(frame); +// Pointers to U and V color pixels. +uint8_t* ptr_u; +uint8_t* ptr_v; +uint8_t temp_chroma; +if (frame->IsZeroSize()) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, + -1, "Null frame pointer"); + return VPM_GENERAL_ERROR; +} - if (frame->IsZeroSize()) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, - -1, "Null frame pointer"); - return VPM_GENERAL_ERROR; - } +if (frame->width() == 0 || frame->height() == 0) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, + -1, "Invalid frame size"); + return VPM_GENERAL_ERROR; +} - if (frame->width() == 0 || frame->height() == 0) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, - -1, "Invalid frame size"); - return VPM_GENERAL_ERROR; - } +// Set pointers to first U and V pixels (skip luminance). +ptr_u = frame->buffer(kUPlane); +ptr_v = frame->buffer(kVPlane); +int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2); - // set pointers to first U and V pixels (skip luminance) - ptrU = frame->buffer(kUPlane); - ptrV = frame->buffer(kVPlane); - int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2); +// Loop through all chrominance pixels and modify color. +for (int ix = 0; ix < size_uv; ix++) { + temp_chroma = colorTable[*ptr_u][*ptr_v]; + *ptr_v = colorTable[*ptr_v][*ptr_u]; + *ptr_u = temp_chroma; - // loop through all chrominance pixels and modify color - for (int ix = 0; ix < size_uv; ix++) - { - tempChroma = colorTable[*ptrU][*ptrV]; - *ptrV = colorTable[*ptrV][*ptrU]; - *ptrU = tempChroma; - - // increment pointers - ptrU++; - ptrV++; - } - return VPM_OK; - } - -} // namespace + ptr_u++; + ptr_v++; +} +return VPM_OK; +} +} // namespace VideoProcessing } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.h index f8baaa770e73..233a47fe83dd 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement.h @@ -11,19 +11,18 @@ /* * color_enhancement.h */ -#ifndef VPM_COLOR_ENHANCEMENT_H -#define VPM_COLOR_ENHANCEMENT_H +#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_COLOR_ENHANCEMENT_H +#define WEBRTC_MODULES_VIDEO_PROCESSING_COLOR_ENHANCEMENT_H #include "webrtc/modules/video_processing/main/interface/video_processing.h" #include "webrtc/typedefs.h" namespace webrtc { +namespace VideoProcessing { -namespace VideoProcessing -{ - int32_t ColorEnhancement(I420VideoFrame* frame); -} +int32_t ColorEnhancement(I420VideoFrame* frame); -} // namespace +} // namespace VideoProcessing +} // namespace webrtc -#endif // VPM_COLOR_ENHANCEMENT_H +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_COLOR_ENHANCEMENT_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement_private.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement_private.h index d4aadfcab16f..e5789105a8b3 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement_private.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/color_enhancement_private.h @@ -1,12 +1,23 @@ -#ifndef VPM_COLOR_ENHANCEMENT_PRIVATE_H -#define 
VPM_COLOR_ENHANCEMENT_PRIVATE_H +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_COLOR_ENHANCEMENT_PRIVATE_H_ +#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_COLOR_ENHANCEMENT_PRIVATE_H_ #include "webrtc/typedefs.h" namespace webrtc { +namespace VideoProcessing { -//Table created with Matlab script createTable.m -//Usage: +// Table created with Matlab script createTable.m +// Usage: // Umod=colorTable[U][V] // Vmod=colorTable[V][U] static const uint8_t colorTable[256][256] = { @@ -268,6 +279,8 @@ static const uint8_t colorTable[256][256] = { {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255} }; -} // namespace -#endif // VPM_COLOR_ENHANCEMENT_PRIVATE_H +} // namespace VideoProcessing +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_COLOR_ENHANCEMENT_PRIVATE_H_ diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.cc index a500991d5430..25fcb5431a01 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.cc @@ -17,229 +17,185 @@ namespace webrtc { -VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection): -_origFrame(NULL), -_prevFrame(NULL), -_width(0), -_height(0), -_skipNum(1), -_border(8), -_motionMagnitude(0.0f), -_spatialPredErr(0.0f), -_spatialPredErrH(0.0f), -_spatialPredErrV(0.0f), -_firstFrame(true), -_CAInit(false), -_cMetrics(NULL) -{ - ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C; - TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C; +VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection) + : orig_frame_(NULL), + prev_frame_(NULL), + width_(0), + height_(0), + skip_num_(1), + border_(8), + motion_magnitude_(0.0f), + 
spatial_pred_err_(0.0f), + spatial_pred_err_h_(0.0f), + spatial_pred_err_v_(0.0f), + first_frame_(true), + ca_Init_(false), + content_metrics_(NULL) { + ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C; + TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C; - if (runtime_cpu_detection) - { + if (runtime_cpu_detection) { #if defined(WEBRTC_ARCH_X86_FAMILY) - if (WebRtc_GetCPUInfo(kSSE2)) - { - ComputeSpatialMetrics = - &VPMContentAnalysis::ComputeSpatialMetrics_SSE2; - TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2; - } + if (WebRtc_GetCPUInfo(kSSE2)) { + ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_SSE2; + TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2; + } #endif - } - - Release(); + } + Release(); } -VPMContentAnalysis::~VPMContentAnalysis() -{ - Release(); +VPMContentAnalysis::~VPMContentAnalysis() { + Release(); } -VideoContentMetrics* -VPMContentAnalysis::ComputeContentMetrics(const I420VideoFrame& inputFrame) -{ - if (inputFrame.IsZeroSize()) - { - return NULL; - } +VideoContentMetrics* VPMContentAnalysis::ComputeContentMetrics( + const I420VideoFrame& inputFrame) { + if (inputFrame.IsZeroSize()) + return NULL; - // Init if needed (native dimension change) - if (_width != inputFrame.width() || _height != inputFrame.height()) - { - if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height())) - { - return NULL; - } - } - // Only interested in the Y plane. - _origFrame = inputFrame.buffer(kYPlane); + // Init if needed (native dimension change). + if (width_ != inputFrame.width() || height_ != inputFrame.height()) { + if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height())) + return NULL; + } + // Only interested in the Y plane. + orig_frame_ = inputFrame.buffer(kYPlane); - // compute spatial metrics: 3 spatial prediction errors - (this->*ComputeSpatialMetrics)(); + // Compute spatial metrics: 3 spatial prediction errors. + (this->*ComputeSpatialMetrics)(); - // compute motion metrics - if (_firstFrame == false) - ComputeMotionMetrics(); + // Compute motion metrics + if (first_frame_ == false) + ComputeMotionMetrics(); - // saving current frame as previous one: Y only - memcpy(_prevFrame, _origFrame, _width * _height); + // Saving current frame as previous one: Y only. + memcpy(prev_frame_, orig_frame_, width_ * height_); - _firstFrame = false; - _CAInit = true; + first_frame_ = false; + ca_Init_ = true; - return ContentMetrics(); + return ContentMetrics(); } -int32_t -VPMContentAnalysis::Release() -{ - if (_cMetrics != NULL) - { - delete _cMetrics; - _cMetrics = NULL; - } +int32_t VPMContentAnalysis::Release() { + if (content_metrics_ != NULL) { + delete content_metrics_; + content_metrics_ = NULL; + } - if (_prevFrame != NULL) - { - delete [] _prevFrame; - _prevFrame = NULL; - } + if (prev_frame_ != NULL) { + delete [] prev_frame_; + prev_frame_ = NULL; + } - _width = 0; - _height = 0; - _firstFrame = true; + width_ = 0; + height_ = 0; + first_frame_ = true; - return VPM_OK; + return VPM_OK; } -int32_t -VPMContentAnalysis::Initialize(int width, int height) -{ - _width = width; - _height = height; - _firstFrame = true; +int32_t VPMContentAnalysis::Initialize(int width, int height) { + width_ = width; + height_ = height; + first_frame_ = true; - // skip parameter: # of skipped rows: for complexity reduction - // temporal also currently uses it for column reduction. 
- _skipNum = 1; + // skip parameter: # of skipped rows: for complexity reduction + // temporal also currently uses it for column reduction. + skip_num_ = 1; - // use skipNum = 2 for 4CIF, WHD - if ( (_height >= 576) && (_width >= 704) ) - { - _skipNum = 2; - } - // use skipNum = 4 for FULLL_HD images - if ( (_height >= 1080) && (_width >= 1920) ) - { - _skipNum = 4; - } + // use skipNum = 2 for 4CIF, WHD + if ( (height_ >= 576) && (width_ >= 704) ) { + skip_num_ = 2; + } + // use skipNum = 4 for FULLL_HD images + if ( (height_ >= 1080) && (width_ >= 1920) ) { + skip_num_ = 4; + } - if (_cMetrics != NULL) - { - delete _cMetrics; - } + if (content_metrics_ != NULL) { + delete content_metrics_; + } - if (_prevFrame != NULL) - { - delete [] _prevFrame; - } + if (prev_frame_ != NULL) { + delete [] prev_frame_; + } - // Spatial Metrics don't work on a border of 8. Minimum processing - // block size is 16 pixels. So make sure the width and height support this. - if (_width <= 32 || _height <= 32) - { - _CAInit = false; - return VPM_PARAMETER_ERROR; - } + // Spatial Metrics don't work on a border of 8. Minimum processing + // block size is 16 pixels. So make sure the width and height support this. + if (width_ <= 32 || height_ <= 32) { + ca_Init_ = false; + return VPM_PARAMETER_ERROR; + } - _cMetrics = new VideoContentMetrics(); - if (_cMetrics == NULL) - { - return VPM_MEMORY; - } + content_metrics_ = new VideoContentMetrics(); + if (content_metrics_ == NULL) { + return VPM_MEMORY; + } - _prevFrame = new uint8_t[_width * _height] ; // Y only - if (_prevFrame == NULL) - { - return VPM_MEMORY; - } + prev_frame_ = new uint8_t[width_ * height_]; // Y only. + if (prev_frame_ == NULL) return VPM_MEMORY; - return VPM_OK; + return VPM_OK; } // Compute motion metrics: magnitude over non-zero motion vectors, // and size of zero cluster -int32_t -VPMContentAnalysis::ComputeMotionMetrics() -{ - - // Motion metrics: only one is derived from normalized - // (MAD) temporal difference - (this->*TemporalDiffMetric)(); - - return VPM_OK; +int32_t VPMContentAnalysis::ComputeMotionMetrics() { + // Motion metrics: only one is derived from normalized + // (MAD) temporal difference + (this->*TemporalDiffMetric)(); + return VPM_OK; } // Normalized temporal difference (MAD): used as a motion level metric // Normalize MAD by spatial contrast: images with more contrast // (pixel variance) likely have larger temporal difference // To reduce complexity, we compute the metric for a reduced set of points. -int32_t -VPMContentAnalysis::TemporalDiffMetric_C() -{ - // size of original frame - int sizei = _height; - int sizej = _width; +int32_t VPMContentAnalysis::TemporalDiffMetric_C() { + // size of original frame + int sizei = height_; + int sizej = width_; + uint32_t tempDiffSum = 0; + uint32_t pixelSum = 0; + uint64_t pixelSqSum = 0; - uint32_t tempDiffSum = 0; - uint32_t pixelSum = 0; - uint64_t pixelSqSum = 0; + uint32_t num_pixels = 0; // Counter for # of pixels. 
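The loop that follows accumulates three sums over the subsampled pixels (absolute frame difference, pixel sum, squared pixel sum); the motion metric is then the mean absolute difference normalized by the frame's contrast, i.e. the standard deviation of the sampled luma. A scalar sketch of that final normalization step, with hypothetical parameter names:

#include <math.h>
#include <stdint.h>

// Motion magnitude as computed below: MAD between frames divided by the
// standard deviation (contrast) of the current frame's sampled pixels.
float MotionMagnitude(uint32_t temp_diff_sum, uint32_t pixel_sum,
                      uint64_t pixel_sq_sum, uint32_t num_pixels) {
  if (temp_diff_sum == 0 || num_pixels == 0)
    return 0.0f;
  const float mad = (float)temp_diff_sum / (float)num_pixels;
  const float mean = (float)pixel_sum / (float)num_pixels;
  const float mean_sq = (float)pixel_sq_sum / (float)num_pixels;
  float contrast = mean_sq - mean * mean;  // Variance of sampled pixels.
  if (contrast <= 0.0f)
    return 0.0f;
  contrast = sqrtf(contrast);
  return mad / contrast;
}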
+ const int width_end = ((width_ - 2*border_) & -16) + border_; - uint32_t numPixels = 0; // counter for # of pixels + for (int i = border_; i < sizei - border_; i += skip_num_) { + for (int j = border_; j < width_end; j++) { + num_pixels += 1; + int ssn = i * sizej + j; - const int width_end = ((_width - 2*_border) & -16) + _border; + uint8_t currPixel = orig_frame_[ssn]; + uint8_t prevPixel = prev_frame_[ssn]; - for(int i = _border; i < sizei - _border; i += _skipNum) - { - for(int j = _border; j < width_end; j++) - { - numPixels += 1; - int ssn = i * sizej + j; - - uint8_t currPixel = _origFrame[ssn]; - uint8_t prevPixel = _prevFrame[ssn]; - - tempDiffSum += (uint32_t) - abs((int16_t)(currPixel - prevPixel)); - pixelSum += (uint32_t) currPixel; - pixelSqSum += (uint64_t) (currPixel * currPixel); - } + tempDiffSum += (uint32_t)abs((int16_t)(currPixel - prevPixel)); + pixelSum += (uint32_t) currPixel; + pixelSqSum += (uint64_t) (currPixel * currPixel); } + } - // default - _motionMagnitude = 0.0f; + // Default. + motion_magnitude_ = 0.0f; - if (tempDiffSum == 0) - { - return VPM_OK; - } + if (tempDiffSum == 0) return VPM_OK; - // normalize over all pixels - float const tempDiffAvg = (float)tempDiffSum / (float)(numPixels); - float const pixelSumAvg = (float)pixelSum / (float)(numPixels); - float const pixelSqSumAvg = (float)pixelSqSum / (float)(numPixels); - float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg); - - if (contrast > 0.0) - { - contrast = sqrt(contrast); - _motionMagnitude = tempDiffAvg/contrast; - } - - return VPM_OK; + // Normalize over all pixels. + float const tempDiffAvg = (float)tempDiffSum / (float)(num_pixels); + float const pixelSumAvg = (float)pixelSum / (float)(num_pixels); + float const pixelSqSumAvg = (float)pixelSqSum / (float)(num_pixels); + float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg); + if (contrast > 0.0) { + contrast = sqrt(contrast); + motion_magnitude_ = tempDiffAvg/contrast; + } + return VPM_OK; } // Compute spatial metrics: @@ -249,88 +205,71 @@ VPMContentAnalysis::TemporalDiffMetric_C() // The metrics are a simple estimate of the up-sampling prediction error, // estimated assuming sub-sampling for decimation (no filtering), // and up-sampling back up with simple bilinear interpolation. -int32_t -VPMContentAnalysis::ComputeSpatialMetrics_C() -{ - //size of original frame - const int sizei = _height; - const int sizej = _width; +int32_t VPMContentAnalysis::ComputeSpatialMetrics_C() { + const int sizei = height_; + const int sizej = width_; - // pixel mean square average: used to normalize the spatial metrics - uint32_t pixelMSA = 0; + // Pixel mean square average: used to normalize the spatial metrics. 
+ uint32_t pixelMSA = 0; - uint32_t spatialErrSum = 0; - uint32_t spatialErrVSum = 0; - uint32_t spatialErrHSum = 0; + uint32_t spatialErrSum = 0; + uint32_t spatialErrVSum = 0; + uint32_t spatialErrHSum = 0; - // make sure work section is a multiple of 16 - const int width_end = ((sizej - 2*_border) & -16) + _border; + // make sure work section is a multiple of 16 + const int width_end = ((sizej - 2*border_) & -16) + border_; - for(int i = _border; i < sizei - _border; i += _skipNum) - { - for(int j = _border; j < width_end; j++) - { + for (int i = border_; i < sizei - border_; i += skip_num_) { + for (int j = border_; j < width_end; j++) { + int ssn1= i * sizej + j; + int ssn2 = (i + 1) * sizej + j; // bottom + int ssn3 = (i - 1) * sizej + j; // top + int ssn4 = i * sizej + j + 1; // right + int ssn5 = i * sizej + j - 1; // left - int ssn1= i * sizej + j; - int ssn2 = (i + 1) * sizej + j; // bottom - int ssn3 = (i - 1) * sizej + j; // top - int ssn4 = i * sizej + j + 1; // right - int ssn5 = i * sizej + j - 1; // left + uint16_t refPixel1 = orig_frame_[ssn1] << 1; + uint16_t refPixel2 = orig_frame_[ssn1] << 2; - uint16_t refPixel1 = _origFrame[ssn1] << 1; - uint16_t refPixel2 = _origFrame[ssn1] << 2; + uint8_t bottPixel = orig_frame_[ssn2]; + uint8_t topPixel = orig_frame_[ssn3]; + uint8_t rightPixel = orig_frame_[ssn4]; + uint8_t leftPixel = orig_frame_[ssn5]; - uint8_t bottPixel = _origFrame[ssn2]; - uint8_t topPixel = _origFrame[ssn3]; - uint8_t rightPixel = _origFrame[ssn4]; - uint8_t leftPixel = _origFrame[ssn5]; - - spatialErrSum += (uint32_t) abs((int16_t)(refPixel2 - - (uint16_t)(bottPixel + topPixel - + leftPixel + rightPixel))); - spatialErrVSum += (uint32_t) abs((int16_t)(refPixel1 - - (uint16_t)(bottPixel + topPixel))); - spatialErrHSum += (uint32_t) abs((int16_t)(refPixel1 - - (uint16_t)(leftPixel + rightPixel))); - - pixelMSA += _origFrame[ssn1]; - } + spatialErrSum += (uint32_t) abs((int16_t)(refPixel2 + - (uint16_t)(bottPixel + topPixel + leftPixel + rightPixel))); + spatialErrVSum += (uint32_t) abs((int16_t)(refPixel1 + - (uint16_t)(bottPixel + topPixel))); + spatialErrHSum += (uint32_t) abs((int16_t)(refPixel1 + - (uint16_t)(leftPixel + rightPixel))); + pixelMSA += orig_frame_[ssn1]; } + } - // normalize over all pixels - const float spatialErr = (float)(spatialErrSum >> 2); - const float spatialErrH = (float)(spatialErrHSum >> 1); - const float spatialErrV = (float)(spatialErrVSum >> 1); - const float norm = (float)pixelMSA; + // Normalize over all pixels. 
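Per pixel, the three residuals accumulated in the loop above are up-sampling prediction errors: the center sample predicted from all four neighbors (2x2), from top/bottom only (2x1), and from left/right only (1x2). A scalar sketch for a single interior pixel, using hypothetical names:

#include <stdint.h>
#include <stdlib.h>

// Per-pixel spatial prediction errors accumulated by ComputeSpatialMetrics:
// err_2x2 predicts c from all four neighbors (refPixel2 = c << 2),
// err_2x1 from top/bottom, err_1x2 from left/right (refPixel1 = c << 1).
void SpatialErrors(uint8_t c, uint8_t t, uint8_t b, uint8_t l, uint8_t r,
                   int* err_2x2, int* err_2x1, int* err_1x2) {
  *err_2x2 = abs(4 * c - (t + b + l + r));
  *err_2x1 = abs(2 * c - (t + b));
  *err_1x2 = abs(2 * c - (l + r));
}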
+ const float spatialErr = (float)(spatialErrSum >> 2); + const float spatialErrH = (float)(spatialErrHSum >> 1); + const float spatialErrV = (float)(spatialErrVSum >> 1); + const float norm = (float)pixelMSA; - // 2X2: - _spatialPredErr = spatialErr / norm; - - // 1X2: - _spatialPredErrH = spatialErrH / norm; - - // 2X1: - _spatialPredErrV = spatialErrV / norm; - - return VPM_OK; + // 2X2: + spatial_pred_err_ = spatialErr / norm; + // 1X2: + spatial_pred_err_h_ = spatialErrH / norm; + // 2X1: + spatial_pred_err_v_ = spatialErrV / norm; + return VPM_OK; } -VideoContentMetrics* -VPMContentAnalysis::ContentMetrics() -{ - if (_CAInit == false) - { - return NULL; - } +VideoContentMetrics* VPMContentAnalysis::ContentMetrics() { + if (ca_Init_ == false) return NULL; - _cMetrics->spatial_pred_err = _spatialPredErr; - _cMetrics->spatial_pred_err_h = _spatialPredErrH; - _cMetrics->spatial_pred_err_v = _spatialPredErrV; - // Motion metric: normalized temporal difference (MAD) - _cMetrics->motion_magnitude = _motionMagnitude; - - return _cMetrics; + content_metrics_->spatial_pred_err = spatial_pred_err_; + content_metrics_->spatial_pred_err_h = spatial_pred_err_h_; + content_metrics_->spatial_pred_err_v = spatial_pred_err_v_; + // Motion metric: normalized temporal difference (MAD). + content_metrics_->motion_magnitude = motion_magnitude_; + return content_metrics_; } -} // namespace +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.h index a1e485f1c362..54b5b45c4eb1 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef VPM_CONTENT_ANALYSIS_H -#define VPM_CONTENT_ANALYSIS_H +#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_CONTENT_ANALYSIS_H +#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_CONTENT_ANALYSIS_H #include "webrtc/common_video/interface/i420_video_frame.h" #include "webrtc/modules/interface/module_common_types.h" @@ -18,75 +18,71 @@ namespace webrtc { -class VPMContentAnalysis -{ -public: - // When |runtime_cpu_detection| is true, runtime selection of an optimized - // code path is allowed. - VPMContentAnalysis(bool runtime_cpu_detection); - ~VPMContentAnalysis(); +class VPMContentAnalysis { + public: + // When |runtime_cpu_detection| is true, runtime selection of an optimized + // code path is allowed. 
+ explicit VPMContentAnalysis(bool runtime_cpu_detection); + ~VPMContentAnalysis(); - // Initialize ContentAnalysis - should be called prior to - // extractContentFeature - // Inputs: width, height - // Return value: 0 if OK, negative value upon error - int32_t Initialize(int width, int height); + // Initialize ContentAnalysis - should be called prior to + // extractContentFeature + // Inputs: width, height + // Return value: 0 if OK, negative value upon error + int32_t Initialize(int width, int height); - // Extract content Feature - main function of ContentAnalysis - // Input: new frame - // Return value: pointer to structure containing content Analysis - // metrics or NULL value upon error - VideoContentMetrics* ComputeContentMetrics(const I420VideoFrame& - inputFrame); + // Extract content Feature - main function of ContentAnalysis + // Input: new frame + // Return value: pointer to structure containing content Analysis + // metrics or NULL value upon error + VideoContentMetrics* ComputeContentMetrics(const I420VideoFrame& + inputFrame); - // Release all allocated memory - // Output: 0 if OK, negative value upon error - int32_t Release(); + // Release all allocated memory + // Output: 0 if OK, negative value upon error + int32_t Release(); -private: + private: + // return motion metrics + VideoContentMetrics* ContentMetrics(); - // return motion metrics - VideoContentMetrics* ContentMetrics(); + // Normalized temporal difference metric: for motion magnitude + typedef int32_t (VPMContentAnalysis::*TemporalDiffMetricFunc)(); + TemporalDiffMetricFunc TemporalDiffMetric; + int32_t TemporalDiffMetric_C(); - // Normalized temporal difference metric: for motion magnitude - typedef int32_t (VPMContentAnalysis::*TemporalDiffMetricFunc)(); - TemporalDiffMetricFunc TemporalDiffMetric; - int32_t TemporalDiffMetric_C(); + // Motion metric method: call 2 metrics (magnitude and size) + int32_t ComputeMotionMetrics(); - // Motion metric method: call 2 metrics (magnitude and size) - int32_t ComputeMotionMetrics(); - - // Spatial metric method: computes the 3 frame-average spatial - // prediction errors (1x2,2x1,2x2) - typedef int32_t (VPMContentAnalysis::*ComputeSpatialMetricsFunc)(); - ComputeSpatialMetricsFunc ComputeSpatialMetrics; - int32_t ComputeSpatialMetrics_C(); + // Spatial metric method: computes the 3 frame-average spatial + // prediction errors (1x2,2x1,2x2) + typedef int32_t (VPMContentAnalysis::*ComputeSpatialMetricsFunc)(); + ComputeSpatialMetricsFunc ComputeSpatialMetrics; + int32_t ComputeSpatialMetrics_C(); #if defined(WEBRTC_ARCH_X86_FAMILY) - int32_t ComputeSpatialMetrics_SSE2(); - int32_t TemporalDiffMetric_SSE2(); + int32_t ComputeSpatialMetrics_SSE2(); + int32_t TemporalDiffMetric_SSE2(); #endif - const uint8_t* _origFrame; - uint8_t* _prevFrame; - int _width; - int _height; - int _skipNum; - int _border; + const uint8_t* orig_frame_; + uint8_t* prev_frame_; + int width_; + int height_; + int skip_num_; + int border_; - // Content Metrics: - // stores the local average of the metrics - float _motionMagnitude; // motion class - float _spatialPredErr; // spatial class - float _spatialPredErrH; // spatial class - float _spatialPredErrV; // spatial class - bool _firstFrame; - bool _CAInit; + // Content Metrics: Stores the local average of the metrics. 
+ float motion_magnitude_; // motion class + float spatial_pred_err_; // spatial class + float spatial_pred_err_h_; // spatial class + float spatial_pred_err_v_; // spatial class + bool first_frame_; + bool ca_Init_; - VideoContentMetrics* _cMetrics; + VideoContentMetrics* content_metrics_; +}; -}; // end of VPMContentAnalysis class definition +} // namespace webrtc -} // namespace - -#endif +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_CONTENT_ANALYSIS_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc index 565312bc08f7..17b64ff28043 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc @@ -15,284 +15,248 @@ namespace webrtc { -int32_t -VPMContentAnalysis::TemporalDiffMetric_SSE2() -{ - uint32_t numPixels = 0; // counter for # of pixels +int32_t VPMContentAnalysis::TemporalDiffMetric_SSE2() { + uint32_t num_pixels = 0; // counter for # of pixels + const uint8_t* imgBufO = orig_frame_ + border_*width_ + border_; + const uint8_t* imgBufP = prev_frame_ + border_*width_ + border_; - const uint8_t* imgBufO = _origFrame + _border*_width + _border; - const uint8_t* imgBufP = _prevFrame + _border*_width + _border; + const int32_t width_end = ((width_ - 2*border_) & -16) + border_; - const int32_t width_end = ((_width - 2*_border) & -16) + _border; + __m128i sad_64 = _mm_setzero_si128(); + __m128i sum_64 = _mm_setzero_si128(); + __m128i sqsum_64 = _mm_setzero_si128(); + const __m128i z = _mm_setzero_si128(); - __m128i sad_64 = _mm_setzero_si128(); - __m128i sum_64 = _mm_setzero_si128(); - __m128i sqsum_64 = _mm_setzero_si128(); - const __m128i z = _mm_setzero_si128(); + for (uint16_t i = 0; i < (height_ - 2*border_); i += skip_num_) { + __m128i sqsum_32 = _mm_setzero_si128(); - for(uint16_t i = 0; i < (_height - 2*_border); i += _skipNum) - { - __m128i sqsum_32 = _mm_setzero_si128(); + const uint8_t *lineO = imgBufO; + const uint8_t *lineP = imgBufP; - const uint8_t *lineO = imgBufO; - const uint8_t *lineP = imgBufP; + // Work on 16 pixels at a time. For HD content with a width of 1920 + // this loop will run ~67 times (depending on border). Maximum for + // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit + // results which are then accumulated. There is no chance of + // rollover for these two accumulators. + // o*o will have a maximum of 255*255 = 65025. This will roll over + // a 16 bit accumulator as 67*65025 > 65535, but will fit in a + // 32 bit accumulator. + for (uint16_t j = 0; j < width_end - border_; j += 16) { + const __m128i o = _mm_loadu_si128((__m128i*)(lineO)); + const __m128i p = _mm_loadu_si128((__m128i*)(lineP)); - // Work on 16 pixels at a time. For HD content with a width of 1920 - // this loop will run ~67 times (depending on border). Maximum for - // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit - // results which are then accumulated. There is no chance of - // rollover for these two accumulators. - // o*o will have a maximum of 255*255 = 65025. This will roll over - // a 16 bit accumulator as 67*65025 > 65535, but will fit in a - // 32 bit accumulator. 
- for(uint16_t j = 0; j < width_end - _border; j += 16) - { - const __m128i o = _mm_loadu_si128((__m128i*)(lineO)); - const __m128i p = _mm_loadu_si128((__m128i*)(lineP)); + lineO += 16; + lineP += 16; - lineO += 16; - lineP += 16; + // Abs pixel difference between frames. + sad_64 = _mm_add_epi64 (sad_64, _mm_sad_epu8(o, p)); - // abs pixel difference between frames - sad_64 = _mm_add_epi64 (sad_64, _mm_sad_epu8(o, p)); + // sum of all pixels in frame + sum_64 = _mm_add_epi64 (sum_64, _mm_sad_epu8(o, z)); - // sum of all pixels in frame - sum_64 = _mm_add_epi64 (sum_64, _mm_sad_epu8(o, z)); + // Squared sum of all pixels in frame. + const __m128i olo = _mm_unpacklo_epi8(o,z); + const __m128i ohi = _mm_unpackhi_epi8(o,z); - // squared sum of all pixels in frame - const __m128i olo = _mm_unpacklo_epi8(o,z); - const __m128i ohi = _mm_unpackhi_epi8(o,z); + const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo); + const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi); - const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo); - const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi); - - sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo); - sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi); - } - - // Add to 64 bit running sum as to not roll over. - sqsum_64 = _mm_add_epi64(sqsum_64, - _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32,z), - _mm_unpacklo_epi32(sqsum_32,z))); - - imgBufO += _width * _skipNum; - imgBufP += _width * _skipNum; - numPixels += (width_end - _border); + sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo); + sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi); } - __m128i sad_final_128; - __m128i sum_final_128; - __m128i sqsum_final_128; + // Add to 64 bit running sum as to not roll over. + sqsum_64 = _mm_add_epi64(sqsum_64, + _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32,z), + _mm_unpacklo_epi32(sqsum_32,z))); - // bring sums out of vector registers and into integer register - // domain, summing them along the way - _mm_store_si128 (&sad_final_128, sad_64); - _mm_store_si128 (&sum_final_128, sum_64); - _mm_store_si128 (&sqsum_final_128, sqsum_64); + imgBufO += width_ * skip_num_; + imgBufP += width_ * skip_num_; + num_pixels += (width_end - border_); + } - uint64_t *sad_final_64 = - reinterpret_cast(&sad_final_128); - uint64_t *sum_final_64 = - reinterpret_cast(&sum_final_128); - uint64_t *sqsum_final_64 = - reinterpret_cast(&sqsum_final_128); + __m128i sad_final_128; + __m128i sum_final_128; + __m128i sqsum_final_128; - const uint32_t pixelSum = sum_final_64[0] + sum_final_64[1]; - const uint64_t pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1]; - const uint32_t tempDiffSum = sad_final_64[0] + sad_final_64[1]; + // Bring sums out of vector registers and into integer register + // domain, summing them along the way. 
+ _mm_store_si128 (&sad_final_128, sad_64); + _mm_store_si128 (&sum_final_128, sum_64); + _mm_store_si128 (&sqsum_final_128, sqsum_64); - // default - _motionMagnitude = 0.0f; + uint64_t *sad_final_64 = reinterpret_cast(&sad_final_128); + uint64_t *sum_final_64 = reinterpret_cast(&sum_final_128); + uint64_t *sqsum_final_64 = reinterpret_cast(&sqsum_final_128); - if (tempDiffSum == 0) - { - return VPM_OK; - } + const uint32_t pixelSum = sum_final_64[0] + sum_final_64[1]; + const uint64_t pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1]; + const uint32_t tempDiffSum = sad_final_64[0] + sad_final_64[1]; - // normalize over all pixels - const float tempDiffAvg = (float)tempDiffSum / (float)(numPixels); - const float pixelSumAvg = (float)pixelSum / (float)(numPixels); - const float pixelSqSumAvg = (float)pixelSqSum / (float)(numPixels); - float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg); + // Default. + motion_magnitude_ = 0.0f; - if (contrast > 0.0) - { - contrast = sqrt(contrast); - _motionMagnitude = tempDiffAvg/contrast; - } + if (tempDiffSum == 0) return VPM_OK; - return VPM_OK; + // Normalize over all pixels. + const float tempDiffAvg = (float)tempDiffSum / (float)(num_pixels); + const float pixelSumAvg = (float)pixelSum / (float)(num_pixels); + const float pixelSqSumAvg = (float)pixelSqSum / (float)(num_pixels); + float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg); + + if (contrast > 0.0) { + contrast = sqrt(contrast); + motion_magnitude_ = tempDiffAvg/contrast; + } + + return VPM_OK; } -int32_t -VPMContentAnalysis::ComputeSpatialMetrics_SSE2() -{ - const uint8_t* imgBuf = _origFrame + _border*_width; - const int32_t width_end = ((_width - 2*_border) & -16) + _border; +int32_t VPMContentAnalysis::ComputeSpatialMetrics_SSE2() { + const uint8_t* imgBuf = orig_frame_ + border_*width_; + const int32_t width_end = ((width_ - 2 * border_) & -16) + border_; - __m128i se_32 = _mm_setzero_si128(); - __m128i sev_32 = _mm_setzero_si128(); - __m128i seh_32 = _mm_setzero_si128(); - __m128i msa_32 = _mm_setzero_si128(); - const __m128i z = _mm_setzero_si128(); + __m128i se_32 = _mm_setzero_si128(); + __m128i sev_32 = _mm_setzero_si128(); + __m128i seh_32 = _mm_setzero_si128(); + __m128i msa_32 = _mm_setzero_si128(); + const __m128i z = _mm_setzero_si128(); - // Error is accumulated as a 32 bit value. Looking at HD content with a - // height of 1080 lines, or about 67 macro blocks. If the 16 bit row - // value is maxed out at 65529 for every row, 65529*1080 = 70777800, which - // will not roll over a 32 bit accumulator. - // _skipNum is also used to reduce the number of rows - for(int32_t i = 0; i < (_height - 2*_border); i += _skipNum) - { - __m128i se_16 = _mm_setzero_si128(); - __m128i sev_16 = _mm_setzero_si128(); - __m128i seh_16 = _mm_setzero_si128(); - __m128i msa_16 = _mm_setzero_si128(); + // Error is accumulated as a 32 bit value. Looking at HD content with a + // height of 1080 lines, or about 67 macro blocks. If the 16 bit row + // value is maxed out at 65529 for every row, 65529*1080 = 70777800, which + // will not roll over a 32 bit accumulator. + // skip_num_ is also used to reduce the number of rows + for (int32_t i = 0; i < (height_ - 2*border_); i += skip_num_) { + __m128i se_16 = _mm_setzero_si128(); + __m128i sev_16 = _mm_setzero_si128(); + __m128i seh_16 = _mm_setzero_si128(); + __m128i msa_16 = _mm_setzero_si128(); - // Row error is accumulated as a 16 bit value. There are 8 - // accumulators. Max value of a 16 bit number is 65529. 
Looking - // at HD content, 1080p, has a width of 1920, 120 macro blocks. - // A mb at a time is processed at a time. Absolute max error at - // a point would be abs(0-255+255+255+255) which equals 1020. - // 120*1020 = 122400. The probability of hitting this is quite low - // on well behaved content. A specially crafted image could roll over. - // _border could also be adjusted to concentrate on just the center of - // the images for an HD capture in order to reduce the possiblity of - // rollover. - const uint8_t *lineTop = imgBuf - _width + _border; - const uint8_t *lineCen = imgBuf + _border; - const uint8_t *lineBot = imgBuf + _width + _border; + // Row error is accumulated as a 16 bit value. There are 8 + // accumulators. Max value of a 16 bit number is 65529. Looking + // at HD content, 1080p, has a width of 1920, 120 macro blocks. + // A mb at a time is processed at a time. Absolute max error at + // a point would be abs(0-255+255+255+255) which equals 1020. + // 120*1020 = 122400. The probability of hitting this is quite low + // on well behaved content. A specially crafted image could roll over. + // border_ could also be adjusted to concentrate on just the center of + // the images for an HD capture in order to reduce the possiblity of + // rollover. + const uint8_t *lineTop = imgBuf - width_ + border_; + const uint8_t *lineCen = imgBuf + border_; + const uint8_t *lineBot = imgBuf + width_ + border_; - for(int32_t j = 0; j < width_end - _border; j += 16) - { - const __m128i t = _mm_loadu_si128((__m128i*)(lineTop)); - const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1)); - const __m128i c = _mm_loadu_si128((__m128i*)(lineCen)); - const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1)); - const __m128i b = _mm_loadu_si128((__m128i*)(lineBot)); + for (int32_t j = 0; j < width_end - border_; j += 16) { + const __m128i t = _mm_loadu_si128((__m128i*)(lineTop)); + const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1)); + const __m128i c = _mm_loadu_si128((__m128i*)(lineCen)); + const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1)); + const __m128i b = _mm_loadu_si128((__m128i*)(lineBot)); - lineTop += 16; - lineCen += 16; - lineBot += 16; + lineTop += 16; + lineCen += 16; + lineBot += 16; - // center pixel unpacked - __m128i clo = _mm_unpacklo_epi8(c,z); - __m128i chi = _mm_unpackhi_epi8(c,z); + // center pixel unpacked + __m128i clo = _mm_unpacklo_epi8(c,z); + __m128i chi = _mm_unpackhi_epi8(c,z); - // left right pixels unpacked and added together - const __m128i lrlo = _mm_add_epi16(_mm_unpacklo_epi8(l,z), - _mm_unpacklo_epi8(r,z)); - const __m128i lrhi = _mm_add_epi16(_mm_unpackhi_epi8(l,z), - _mm_unpackhi_epi8(r,z)); + // left right pixels unpacked and added together + const __m128i lrlo = _mm_add_epi16(_mm_unpacklo_epi8(l,z), + _mm_unpacklo_epi8(r,z)); + const __m128i lrhi = _mm_add_epi16(_mm_unpackhi_epi8(l,z), + _mm_unpackhi_epi8(r,z)); - // top & bottom pixels unpacked and added together - const __m128i tblo = _mm_add_epi16(_mm_unpacklo_epi8(t,z), - _mm_unpacklo_epi8(b,z)); - const __m128i tbhi = _mm_add_epi16(_mm_unpackhi_epi8(t,z), - _mm_unpackhi_epi8(b,z)); + // top & bottom pixels unpacked and added together + const __m128i tblo = _mm_add_epi16(_mm_unpacklo_epi8(t,z), + _mm_unpacklo_epi8(b,z)); + const __m128i tbhi = _mm_add_epi16(_mm_unpackhi_epi8(t,z), + _mm_unpackhi_epi8(b,z)); - // running sum of all pixels - msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo)); + // running sum of all pixels + msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, 
clo)); - clo = _mm_slli_epi16(clo, 1); - chi = _mm_slli_epi16(chi, 1); - const __m128i sevtlo = _mm_subs_epi16(clo, tblo); - const __m128i sevthi = _mm_subs_epi16(chi, tbhi); - const __m128i sehtlo = _mm_subs_epi16(clo, lrlo); - const __m128i sehthi = _mm_subs_epi16(chi, lrhi); + clo = _mm_slli_epi16(clo, 1); + chi = _mm_slli_epi16(chi, 1); + const __m128i sevtlo = _mm_subs_epi16(clo, tblo); + const __m128i sevthi = _mm_subs_epi16(chi, tbhi); + const __m128i sehtlo = _mm_subs_epi16(clo, lrlo); + const __m128i sehthi = _mm_subs_epi16(chi, lrhi); - clo = _mm_slli_epi16(clo, 1); - chi = _mm_slli_epi16(chi, 1); - const __m128i setlo = _mm_subs_epi16(clo, - _mm_add_epi16(lrlo, tblo)); - const __m128i sethi = _mm_subs_epi16(chi, - _mm_add_epi16(lrhi, tbhi)); + clo = _mm_slli_epi16(clo, 1); + chi = _mm_slli_epi16(chi, 1); + const __m128i setlo = _mm_subs_epi16(clo, _mm_add_epi16(lrlo, tblo)); + const __m128i sethi = _mm_subs_epi16(chi, _mm_add_epi16(lrhi, tbhi)); - // Add to 16 bit running sum - se_16 = _mm_add_epi16(se_16, - _mm_max_epi16(setlo, - _mm_subs_epi16(z, setlo))); - se_16 = _mm_add_epi16(se_16, - _mm_max_epi16(sethi, - _mm_subs_epi16(z, sethi))); - sev_16 = _mm_add_epi16(sev_16, - _mm_max_epi16(sevtlo, - _mm_subs_epi16(z, sevtlo))); - sev_16 = _mm_add_epi16(sev_16, - _mm_max_epi16(sevthi, - _mm_subs_epi16(z, sevthi))); - seh_16 = _mm_add_epi16(seh_16, - _mm_max_epi16(sehtlo, - _mm_subs_epi16(z, sehtlo))); - seh_16 = _mm_add_epi16(seh_16, - _mm_max_epi16(sehthi, - _mm_subs_epi16(z, sehthi))); - } - - // Add to 32 bit running sum as to not roll over. - se_32 = _mm_add_epi32(se_32, - _mm_add_epi32(_mm_unpackhi_epi16(se_16,z), - _mm_unpacklo_epi16(se_16,z))); - sev_32 = _mm_add_epi32(sev_32, - _mm_add_epi32(_mm_unpackhi_epi16(sev_16,z), - _mm_unpacklo_epi16(sev_16,z))); - seh_32 = _mm_add_epi32(seh_32, - _mm_add_epi32(_mm_unpackhi_epi16(seh_16,z), - _mm_unpacklo_epi16(seh_16,z))); - msa_32 = _mm_add_epi32(msa_32, - _mm_add_epi32(_mm_unpackhi_epi16(msa_16,z), - _mm_unpacklo_epi16(msa_16,z))); - - imgBuf += _width * _skipNum; + // Add to 16 bit running sum + se_16 = _mm_add_epi16(se_16, _mm_max_epi16(setlo, + _mm_subs_epi16(z, setlo))); + se_16 = _mm_add_epi16(se_16, _mm_max_epi16(sethi, + _mm_subs_epi16(z, sethi))); + sev_16 = _mm_add_epi16(sev_16, _mm_max_epi16(sevtlo, + _mm_subs_epi16(z, sevtlo))); + sev_16 = _mm_add_epi16(sev_16, _mm_max_epi16(sevthi, + _mm_subs_epi16(z, sevthi))); + seh_16 = _mm_add_epi16(seh_16, _mm_max_epi16(sehtlo, + _mm_subs_epi16(z, sehtlo))); + seh_16 = _mm_add_epi16(seh_16, _mm_max_epi16(sehthi, + _mm_subs_epi16(z, sehthi))); } - __m128i se_128; - __m128i sev_128; - __m128i seh_128; - __m128i msa_128; + // Add to 32 bit running sum as to not roll over. 
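The repeated max(x, 0 - x) pairs above are the SSE2 substitute for a 16-bit absolute value, since _mm_abs_epi16 only exists from SSSE3 onward. As a hypothetical one-line helper:

#include <emmintrin.h>

// Saturating |x| for eight signed 16-bit lanes: 0 - x saturates instead of
// wrapping for x == -32768, so the result never goes negative.
static inline __m128i AbsEpi16Sse2(__m128i x) {
  const __m128i zero = _mm_setzero_si128();
  return _mm_max_epi16(x, _mm_subs_epi16(zero, x));
}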
+ se_32 = _mm_add_epi32(se_32, _mm_add_epi32(_mm_unpackhi_epi16(se_16,z), + _mm_unpacklo_epi16(se_16,z))); + sev_32 = _mm_add_epi32(sev_32, _mm_add_epi32(_mm_unpackhi_epi16(sev_16,z), + _mm_unpacklo_epi16(sev_16,z))); + seh_32 = _mm_add_epi32(seh_32, _mm_add_epi32(_mm_unpackhi_epi16(seh_16,z), + _mm_unpacklo_epi16(seh_16,z))); + msa_32 = _mm_add_epi32(msa_32, _mm_add_epi32(_mm_unpackhi_epi16(msa_16,z), + _mm_unpacklo_epi16(msa_16,z))); - // bring sums out of vector registers and into integer register - // domain, summing them along the way - _mm_store_si128 (&se_128, - _mm_add_epi64(_mm_unpackhi_epi32(se_32,z), - _mm_unpacklo_epi32(se_32,z))); - _mm_store_si128 (&sev_128, - _mm_add_epi64(_mm_unpackhi_epi32(sev_32,z), - _mm_unpacklo_epi32(sev_32,z))); - _mm_store_si128 (&seh_128, - _mm_add_epi64(_mm_unpackhi_epi32(seh_32,z), - _mm_unpacklo_epi32(seh_32,z))); - _mm_store_si128 (&msa_128, - _mm_add_epi64(_mm_unpackhi_epi32(msa_32,z), - _mm_unpacklo_epi32(msa_32,z))); + imgBuf += width_ * skip_num_; + } - uint64_t *se_64 = - reinterpret_cast(&se_128); - uint64_t *sev_64 = - reinterpret_cast(&sev_128); - uint64_t *seh_64 = - reinterpret_cast(&seh_128); - uint64_t *msa_64 = - reinterpret_cast(&msa_128); + __m128i se_128; + __m128i sev_128; + __m128i seh_128; + __m128i msa_128; - const uint32_t spatialErrSum = se_64[0] + se_64[1]; - const uint32_t spatialErrVSum = sev_64[0] + sev_64[1]; - const uint32_t spatialErrHSum = seh_64[0] + seh_64[1]; - const uint32_t pixelMSA = msa_64[0] + msa_64[1]; + // Bring sums out of vector registers and into integer register + // domain, summing them along the way. + _mm_store_si128 (&se_128, _mm_add_epi64(_mm_unpackhi_epi32(se_32,z), + _mm_unpacklo_epi32(se_32,z))); + _mm_store_si128 (&sev_128, _mm_add_epi64(_mm_unpackhi_epi32(sev_32,z), + _mm_unpacklo_epi32(sev_32,z))); + _mm_store_si128 (&seh_128, _mm_add_epi64(_mm_unpackhi_epi32(seh_32,z), + _mm_unpacklo_epi32(seh_32,z))); + _mm_store_si128 (&msa_128, _mm_add_epi64(_mm_unpackhi_epi32(msa_32,z), + _mm_unpacklo_epi32(msa_32,z))); - // normalize over all pixels - const float spatialErr = (float)(spatialErrSum >> 2); - const float spatialErrH = (float)(spatialErrHSum >> 1); - const float spatialErrV = (float)(spatialErrVSum >> 1); - const float norm = (float)pixelMSA; + uint64_t *se_64 = reinterpret_cast(&se_128); + uint64_t *sev_64 = reinterpret_cast(&sev_128); + uint64_t *seh_64 = reinterpret_cast(&seh_128); + uint64_t *msa_64 = reinterpret_cast(&msa_128); - // 2X2: - _spatialPredErr = spatialErr / norm; + const uint32_t spatialErrSum = se_64[0] + se_64[1]; + const uint32_t spatialErrVSum = sev_64[0] + sev_64[1]; + const uint32_t spatialErrHSum = seh_64[0] + seh_64[1]; + const uint32_t pixelMSA = msa_64[0] + msa_64[1]; - // 1X2: - _spatialPredErrH = spatialErrH / norm; + // Normalize over all pixels. 
+ const float spatialErr = (float)(spatialErrSum >> 2); + const float spatialErrH = (float)(spatialErrHSum >> 1); + const float spatialErrV = (float)(spatialErrVSum >> 1); + const float norm = (float)pixelMSA; - // 2X1: - _spatialPredErrV = spatialErrV / norm; + // 2X2: + spatial_pred_err_ = spatialErr / norm; + + // 1X2: + spatial_pred_err_h_ = spatialErrH / norm; + + // 2X1: + spatial_pred_err_v_ = spatialErrV / norm; return VPM_OK; } diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.cc index a4b55c1c13b6..898fd80f4734 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.cc @@ -20,431 +20,386 @@ namespace webrtc { // Detection constants -enum { kFrequencyDeviation = 39 }; // (Q4) Maximum allowed deviation for detection -enum { kMinFrequencyToDetect = 32 }; // (Q4) Minimum frequency that can be detected -enum { kNumFlickerBeforeDetect = 2 }; // Number of flickers before we accept detection -enum { kMeanValueScaling = 4 }; // (Q4) In power of 2 -enum { kZeroCrossingDeadzone = 10 }; // Deadzone region in terms of pixel values - -// Deflickering constants +// (Q4) Maximum allowed deviation for detection. +enum { kFrequencyDeviation = 39 }; +// (Q4) Minimum frequency that can be detected. +enum { kMinFrequencyToDetect = 32 }; +// Number of flickers before we accept detection +enum { kNumFlickerBeforeDetect = 2 }; +enum { kmean_valueScaling = 4 }; // (Q4) In power of 2 +// Dead-zone region in terms of pixel values +enum { kZeroCrossingDeadzone = 10 }; +// Deflickering constants. // Compute the quantiles over 1 / DownsamplingFactor of the image. enum { kDownsamplingFactor = 8 }; enum { kLog2OfDownsamplingFactor = 3 }; // To generate in Matlab: -// >> probUW16 = round(2^11 * [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]); +// >> probUW16 = round(2^11 * +// [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]); // >> fprintf('%d, ', probUW16) -// Resolution reduced to avoid overflow when multiplying with the (potentially) large -// number of pixels. -const uint16_t VPMDeflickering::_probUW16[kNumProbs] = - {102, 205, 410, 614, 819, 1024, 1229, 1434, 1638, 1843, 1946, 1987}; // +// Resolution reduced to avoid overflow when multiplying with the +// (potentially) large number of pixels. 
+const uint16_t VPMDeflickering::prob_uw16_[kNumProbs] = {102, 205, 410, 614, + 819, 1024, 1229, 1434, 1638, 1843, 1946, 1987}; // // To generate in Matlab: // >> numQuants = 14; maxOnlyLength = 5; -// >> weightUW16 = round(2^15 * [linspace(0.5, 1.0, numQuants - maxOnlyLength)]); +// >> weightUW16 = round(2^15 * +// [linspace(0.5, 1.0, numQuants - maxOnlyLength)]); // >> fprintf('%d, %d,\n ', weightUW16); -const uint16_t VPMDeflickering::_weightUW16[kNumQuants - kMaxOnlyLength] = +const uint16_t VPMDeflickering::weight_uw16_[kNumQuants - kMaxOnlyLength] = {16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // - -VPMDeflickering::VPMDeflickering() : - _id(0) -{ - Reset(); + +VPMDeflickering::VPMDeflickering() + : id_(0) { + Reset(); } -VPMDeflickering::~VPMDeflickering() -{ +VPMDeflickering::~VPMDeflickering() {} + +int32_t VPMDeflickering::ChangeUniqueId(const int32_t id) { + id_ = id; + return 0; } -int32_t -VPMDeflickering::ChangeUniqueId(const int32_t id) -{ - _id = id; +void VPMDeflickering::Reset() { + mean_buffer_length_ = 0; + detection_state_ = 0; + frame_rate_ = 0; + + memset(mean_buffer_, 0, sizeof(int32_t) * kMeanBufferLength); + memset(timestamp_buffer_, 0, sizeof(int32_t) * kMeanBufferLength); + + // Initialize the history with a uniformly distributed histogram. + quant_hist_uw8_[0][0] = 0; + quant_hist_uw8_[0][kNumQuants - 1] = 255; + for (int32_t i = 0; i < kNumProbs; i++) { + quant_hist_uw8_[0][i + 1] = static_cast((WEBRTC_SPL_UMUL_16_16( + prob_uw16_[i], 255) + (1 << 10)) >> 11); // Unsigned round. + } + + for (int32_t i = 1; i < kFrameHistory_size; i++) { + memcpy(quant_hist_uw8_[i], quant_hist_uw8_[0], + sizeof(uint8_t) * kNumQuants); + } +} + +int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame, + VideoProcessingModule::FrameStats* stats) { + assert(frame); + uint32_t frame_memory; + uint8_t quant_uw8[kNumQuants]; + uint8_t maxquant_uw8[kNumQuants]; + uint8_t minquant_uw8[kNumQuants]; + uint16_t target_quant_uw16[kNumQuants]; + uint16_t increment_uw16; + uint8_t map_uw8[256]; + + uint16_t tmp_uw16; + uint32_t tmp_uw32; + int width = frame->width(); + int height = frame->height(); + + if (frame->IsZeroSize()) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, + "Null frame pointer"); + return VPM_GENERAL_ERROR; + } + + // Stricter height check due to subsampling size calculation below. + if (height < 2) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, + "Invalid frame size"); + return VPM_GENERAL_ERROR; + } + + if (!VideoProcessingModule::ValidFrameStats(*stats)) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, + "Invalid frame stats"); + return VPM_GENERAL_ERROR; + } + + if (PreDetection(frame->timestamp(), *stats) == -1) return VPM_GENERAL_ERROR; + + // Flicker detection + int32_t det_flicker = DetectFlicker(); + if (det_flicker < 0) { + return VPM_GENERAL_ERROR; + } else if (det_flicker != 1) { return 0; -} + } -void -VPMDeflickering::Reset() -{ - _meanBufferLength = 0; - _detectionState = 0; - _frameRate = 0; + // Size of luminance component. 
+  const uint32_t y_size = height * width;

-    memset(_meanBuffer, 0, sizeof(int32_t) * kMeanBufferLength);
-    memset(_timestampBuffer, 0, sizeof(int32_t) * kMeanBufferLength);
+  const uint32_t y_sub_size = width * (((height - 1) >>
+      kLog2OfDownsamplingFactor) + 1);
+  uint8_t* y_sorted = new uint8_t[y_sub_size];
+  uint32_t sort_row_idx = 0;
+  for (int i = 0; i < height; i += kDownsamplingFactor) {
+    memcpy(y_sorted + sort_row_idx * width,
+           frame->buffer(kYPlane) + i * width, width);
+    sort_row_idx++;
+  }

-    // Initialize the history with a uniformly distributed histogram
-    _quantHistUW8[0][0] = 0;
-    _quantHistUW8[0][kNumQuants - 1] = 255;
-    for (int32_t i = 0; i < kNumProbs; i++)
-    {
-        _quantHistUW8[0][i + 1] = static_cast<uint8_t>((WEBRTC_SPL_UMUL_16_16(
-            _probUW16[i], 255) + (1 << 10)) >> 11); // Unsigned round.
+  webrtc::Sort(y_sorted, y_sub_size, webrtc::TYPE_UWord8);
+
+  uint32_t prob_idx_uw32 = 0;
+  quant_uw8[0] = 0;
+  quant_uw8[kNumQuants - 1] = 255;
+
+  // Ensure we won't get an overflow below.
+  // In practice, the number of subsampled pixels will not become this large.
+  if (y_sub_size > (1 << 21) - 1) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
+                 "Subsampled number of pixels too large");
+    return -1;
+  }
+
+  for (int32_t i = 0; i < kNumProbs; i++) {
+    // <Q0>.
+    prob_idx_uw32 = WEBRTC_SPL_UMUL_32_16(y_sub_size, prob_uw16_[i]) >> 11;
+    quant_uw8[i + 1] = y_sorted[prob_idx_uw32];
+  }
+
+  delete [] y_sorted;
+  y_sorted = NULL;
+
+  // Shift history for new frame.
+  memmove(quant_hist_uw8_[1], quant_hist_uw8_[0],
+          (kFrameHistory_size - 1) * kNumQuants * sizeof(uint8_t));
+  // Store current frame in history.
+  memcpy(quant_hist_uw8_[0], quant_uw8, kNumQuants * sizeof(uint8_t));
+
+  // We use a frame memory equal to the ceiling of half the frame rate to
+  // ensure we capture an entire period of flicker.
+  frame_memory = (frame_rate_ + (1 << 5)) >> 5;  // Unsigned ceiling.
+  // frame_rate_ in Q4.
+  if (frame_memory > kFrameHistory_size) {
+    frame_memory = kFrameHistory_size;
+  }
+
+  // Get maximum and minimum.
+  for (int32_t i = 0; i < kNumQuants; i++) {
+    maxquant_uw8[i] = 0;
+    minquant_uw8[i] = 255;
+    for (uint32_t j = 0; j < frame_memory; j++) {
+      if (quant_hist_uw8_[j][i] > maxquant_uw8[i]) {
+        maxquant_uw8[i] = quant_hist_uw8_[j][i];
+      }
+
+      if (quant_hist_uw8_[j][i] < minquant_uw8[i]) {
+        minquant_uw8[i] = quant_hist_uw8_[j][i];
+      }
     }
-    }
-
-    for (int32_t i = 1; i < kFrameHistorySize; i++)
-    {
-        memcpy(_quantHistUW8[i], _quantHistUW8[0], sizeof(uint8_t) * kNumQuants);
-    }
-}
+  }

-int32_t
-VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
-                              VideoProcessingModule::FrameStats* stats)
-{
-    assert(frame);
-    uint32_t frameMemory;
-    uint8_t quantUW8[kNumQuants];
-    uint8_t maxQuantUW8[kNumQuants];
-    uint8_t minQuantUW8[kNumQuants];
-    uint16_t targetQuantUW16[kNumQuants];
-    uint16_t incrementUW16;
-    uint8_t mapUW8[256];
+  // Get target quantiles.
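+  // (Each target blends the recent max and min of a quantile: weight_uw16_
+  // is Q15, so the >> 8 leaves the result in Q7, matching the << 7 used for
+  // the max-only quantiles below.)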
+  for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++) {
+    target_quant_uw16[i] = static_cast<uint16_t>((WEBRTC_SPL_UMUL_16_16(
+        weight_uw16_[i], maxquant_uw8[i]) + WEBRTC_SPL_UMUL_16_16((1 << 15) -
+        weight_uw16_[i], minquant_uw8[i])) >> 8);  // <Q7>
+  }

-    uint16_t tmpUW16;
-    uint32_t tmpUW32;
-    int width = frame->width();
-    int height = frame->height();
+  for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++) {
+    target_quant_uw16[i] = ((uint16_t)maxquant_uw8[i]) << 7;
+  }

-    if (frame->IsZeroSize())
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
-                     "Null frame pointer");
-        return VPM_GENERAL_ERROR;
+  // Compute the map from input to output pixels.
+  uint16_t mapUW16;  // <Q7>
+  for (int32_t i = 1; i < kNumQuants; i++) {
+    // As quant and targetQuant are limited to UWord8, it's safe to use Q7 here.
+    tmp_uw32 = static_cast<uint32_t>(target_quant_uw16[i] -
+        target_quant_uw16[i - 1]);
+    tmp_uw16 = static_cast<uint16_t>(quant_uw8[i] - quant_uw8[i - 1]);  // <Q0>
+
+    if (tmp_uw16 > 0) {
+      increment_uw16 = static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmp_uw32,
+          tmp_uw16));  // <Q7>
+    } else {
+      // The value is irrelevant; the loop below will only iterate once.
+      increment_uw16 = 0;
     }

-    // Stricter height check due to subsampling size calculation below.
-    if (height < 2)
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
-                     "Invalid frame size");
-        return VPM_GENERAL_ERROR;
+    mapUW16 = target_quant_uw16[i - 1];
+    for (uint32_t j = quant_uw8[i - 1]; j < (uint32_t)(quant_uw8[i] + 1); j++) {
+      // Unsigned round.
+      map_uw8[j] = (uint8_t)((mapUW16 + (1 << 6)) >> 7);
+      mapUW16 += increment_uw16;
     }
+  }

-    if (!VideoProcessingModule::ValidFrameStats(*stats))
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
-                     "Invalid frame stats");
-        return VPM_GENERAL_ERROR;
-    }
+  // Map to the output frame.
+  uint8_t* buffer = frame->buffer(kYPlane);
+  for (uint32_t i = 0; i < y_size; i++) {
+    buffer[i] = map_uw8[buffer[i]];
+  }

-    if (PreDetection(frame->timestamp(), *stats) == -1)
-    {
-        return VPM_GENERAL_ERROR;
-    }
+  // Frame was altered, so reset stats.
+  VideoProcessingModule::ClearFrameStats(stats);

-    // Flicker detection
-    int32_t detFlicker = DetectFlicker();
-    if (detFlicker < 0)
-    { // Error
-        return VPM_GENERAL_ERROR;
-    }
-    else if (detFlicker != 1)
-    {
-        return 0;
-    }
-
-    // Size of luminance component
-    const uint32_t ySize = height * width;
-
-    const uint32_t ySubSize = width * (((height - 1) >>
-        kLog2OfDownsamplingFactor) + 1);
-    uint8_t* ySorted = new uint8_t[ySubSize];
-    uint32_t sortRowIdx = 0;
-    for (int i = 0; i < height; i += kDownsamplingFactor)
-    {
-        memcpy(ySorted + sortRowIdx * width,
-               frame->buffer(kYPlane) + i * width, width);
-        sortRowIdx++;
-    }
-
-    webrtc::Sort(ySorted, ySubSize, webrtc::TYPE_UWord8);
-
-    uint32_t probIdxUW32 = 0;
-    quantUW8[0] = 0;
-    quantUW8[kNumQuants - 1] = 255;
-
-    // Ensure we won't get an overflow below.
-    // In practice, the number of subsampled pixels will not become this large.
-    if (ySubSize > (1 << 21) - 1)
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
-                     "Subsampled number of pixels too large");
-        return -1;
-    }
-
-    for (int32_t i = 0; i < kNumProbs; i++)
-    {
-        probIdxUW32 = WEBRTC_SPL_UMUL_32_16(ySubSize, _probUW16[i]) >> 11; // <Q0>
-        quantUW8[i + 1] = ySorted[probIdxUW32];
-    }
-
-    delete [] ySorted;
-    ySorted = NULL;
-
-    // Shift history for new frame.
-    memmove(_quantHistUW8[1], _quantHistUW8[0], (kFrameHistorySize - 1) * kNumQuants *
-        sizeof(uint8_t));
-    // Store current frame in history.
-    memcpy(_quantHistUW8[0], quantUW8, kNumQuants * sizeof(uint8_t));
-
-    // We use a frame memory equal to the ceiling of half the frame rate to ensure we
-    // capture an entire period of flicker.
-    frameMemory = (_frameRate + (1 << 5)) >> 5; // Unsigned ceiling.
-                                                // _frameRate in Q4.
-    if (frameMemory > kFrameHistorySize)
-    {
-        frameMemory = kFrameHistorySize;
-    }
-
-    // Get maximum and minimum.
-    for (int32_t i = 0; i < kNumQuants; i++)
-    {
-        maxQuantUW8[i] = 0;
-        minQuantUW8[i] = 255;
-        for (uint32_t j = 0; j < frameMemory; j++)
-        {
-            if (_quantHistUW8[j][i] > maxQuantUW8[i])
-            {
-                maxQuantUW8[i] = _quantHistUW8[j][i];
-            }
-
-            if (_quantHistUW8[j][i] < minQuantUW8[i])
-            {
-                minQuantUW8[i] = _quantHistUW8[j][i];
-            }
-        }
-    }
-
-    // Get target quantiles.
-    for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++)
-    {
-        targetQuantUW16[i] = static_cast<uint16_t>((WEBRTC_SPL_UMUL_16_16(
-            _weightUW16[i], maxQuantUW8[i]) + WEBRTC_SPL_UMUL_16_16((1 << 15) -
-            _weightUW16[i], minQuantUW8[i])) >> 8); // <Q7>
-    }
-
-    for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++)
-    {
-        targetQuantUW16[i] = ((uint16_t)maxQuantUW8[i]) << 7;
-    }
-
-    // Compute the map from input to output pixels.
-    uint16_t mapUW16; // <Q7>
-    for (int32_t i = 1; i < kNumQuants; i++)
-    {
-        // As quant and targetQuant are limited to UWord8, we're safe to use Q7 here.
-        tmpUW32 = static_cast<uint32_t>(targetQuantUW16[i] -
-            targetQuantUW16[i - 1]); // <Q7>
-        tmpUW16 = static_cast<uint16_t>(quantUW8[i] - quantUW8[i - 1]); // <Q0>
-
-        if (tmpUW16 > 0)
-        {
-            incrementUW16 = static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmpUW32,
-                tmpUW16)); // <Q7>
-        }
-        else
-        {
-            // The value is irrelevant; the loop below will only iterate once.
-            incrementUW16 = 0;
-        }
-
-        mapUW16 = targetQuantUW16[i - 1];
-        for (uint32_t j = quantUW8[i - 1]; j < (uint32_t)(quantUW8[i] + 1); j++)
-        {
-            mapUW8[j] = (uint8_t)((mapUW16 + (1 << 6)) >> 7); // Unsigned round.
-            mapUW16 += incrementUW16;
-        }
-    }
-
-    // Map to the output frame.
-    uint8_t* buffer = frame->buffer(kYPlane);
-    for (uint32_t i = 0; i < ySize; i++)
-    {
-        buffer[i] = mapUW8[buffer[i]];
-    }
-
-    // Frame was altered, so reset stats.
-    VideoProcessingModule::ClearFrameStats(stats);
-
-    return 0;
+  return VPM_OK;
 }

 /**
-   Performs some pre-detection operations. Must be called before
+   Performs some pre-detection operations. Must be called before
    DetectFlicker().

    \param[in] timestamp Timestamp of the current frame.
    \param[in] stats     Statistics of the current frame.
-
+
    \return 0: Success\n
            2: Detection not possible due to flickering frequency too close to
               zero.\n
           -1: Error
 */
-int32_t
-VPMDeflickering::PreDetection(const uint32_t timestamp,
-                              const VideoProcessingModule::FrameStats& stats)
-{
-    int32_t meanVal; // Mean value of frame (Q4)
-    uint32_t frameRate = 0;
-    int32_t meanBufferLength; // Temp variable
+int32_t VPMDeflickering::PreDetection(const uint32_t timestamp,
+    const VideoProcessingModule::FrameStats& stats) {
+  int32_t mean_val;  // Mean value of frame (Q4)
+  uint32_t frame_rate = 0;
+  int32_t meanBufferLength;  // Temp variable.

-    meanVal = ((stats.sum << kMeanValueScaling) / stats.numPixels);
-    /* Update mean value buffer.
-     * This should be done even though we might end up in an unreliable detection.
+  mean_val = ((stats.sum << kmean_valueScaling) / stats.num_pixels);
+  // Update mean value buffer.
+  // This should be done even though we might end up in an unreliable detection.
+  memmove(mean_buffer_ + 1, mean_buffer_,
+          (kMeanBufferLength - 1) * sizeof(int32_t));
+  mean_buffer_[0] = mean_val;
+
+  // Update timestamp buffer.
+ // This should be done even though we might end up in an unreliable detection. + memmove(timestamp_buffer_ + 1, timestamp_buffer_, (kMeanBufferLength - 1) * + sizeof(uint32_t)); + timestamp_buffer_[0] = timestamp; + +/* Compute current frame rate (Q4) */ + if (timestamp_buffer_[kMeanBufferLength - 1] != 0) { + frame_rate = ((90000 << 4) * (kMeanBufferLength - 1)); + frame_rate /= + (timestamp_buffer_[0] - timestamp_buffer_[kMeanBufferLength - 1]); + } else if (timestamp_buffer_[1] != 0) { + frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]); + } + + /* Determine required size of mean value buffer (mean_buffer_length_) */ + if (frame_rate == 0) { + meanBufferLength = 1; + } else { + meanBufferLength = + (kNumFlickerBeforeDetect * frame_rate) / kMinFrequencyToDetect; + } + /* Sanity check of buffer length */ + if (meanBufferLength >= kMeanBufferLength) { + /* Too long buffer. The flickering frequency is too close to zero, which + * makes the estimation unreliable. */ - memmove(_meanBuffer + 1, _meanBuffer, (kMeanBufferLength - 1) * sizeof(int32_t)); - _meanBuffer[0] = meanVal; + mean_buffer_length_ = 0; + return 2; + } + mean_buffer_length_ = meanBufferLength; - /* Update timestamp buffer. - * This should be done even though we might end up in an unreliable detection. - */ - memmove(_timestampBuffer + 1, _timestampBuffer, (kMeanBufferLength - 1) * - sizeof(uint32_t)); - _timestampBuffer[0] = timestamp; + if ((timestamp_buffer_[mean_buffer_length_ - 1] != 0) && + (mean_buffer_length_ != 1)) { + frame_rate = ((90000 << 4) * (mean_buffer_length_ - 1)); + frame_rate /= + (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]); + } else if (timestamp_buffer_[1] != 0) { + frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]); + } + frame_rate_ = frame_rate; - /* Compute current frame rate (Q4) */ - if (_timestampBuffer[kMeanBufferLength - 1] != 0) - { - frameRate = ((90000 << 4) * (kMeanBufferLength - 1)); - frameRate /= (_timestampBuffer[0] - _timestampBuffer[kMeanBufferLength - 1]); - }else if (_timestampBuffer[1] != 0) - { - frameRate = (90000 << 4) / (_timestampBuffer[0] - _timestampBuffer[1]); - } - - /* Determine required size of mean value buffer (_meanBufferLength) */ - if (frameRate == 0) { - meanBufferLength = 1; - } - else { - meanBufferLength = (kNumFlickerBeforeDetect * frameRate) / kMinFrequencyToDetect; - } - /* Sanity check of buffer length */ - if (meanBufferLength >= kMeanBufferLength) - { - /* Too long buffer. The flickering frequency is too close to zero, which - * makes the estimation unreliable. - */ - _meanBufferLength = 0; - return 2; - } - _meanBufferLength = meanBufferLength; - - if ((_timestampBuffer[_meanBufferLength - 1] != 0) && (_meanBufferLength != 1)) - { - frameRate = ((90000 << 4) * (_meanBufferLength - 1)); - frameRate /= (_timestampBuffer[0] - _timestampBuffer[_meanBufferLength - 1]); - }else if (_timestampBuffer[1] != 0) - { - frameRate = (90000 << 4) / (_timestampBuffer[0] - _timestampBuffer[1]); - } - _frameRate = frameRate; - - return 0; + return VPM_OK; } /** - This function detects flicker in the video stream. As a side effect the mean value - buffer is updated with the new mean value. - + This function detects flicker in the video stream. As a side effect the + mean value buffer is updated with the new mean value. 
+ \return 0: No flickering detected\n 1: Flickering detected\n 2: Detection not possible due to unreliable frequency interval -1: Error */ -int32_t VPMDeflickering::DetectFlicker() -{ - /* Local variables */ - uint32_t i; - int32_t freqEst; // (Q4) Frequency estimate to base detection upon - int32_t retVal = -1; +int32_t VPMDeflickering::DetectFlicker() { + uint32_t i; + int32_t freqEst; // (Q4) Frequency estimate to base detection upon + int32_t ret_val = -1; - /* Sanity check for _meanBufferLength */ - if (_meanBufferLength < 2) - { - /* Not possible to estimate frequency */ - return(2); - } - /* Count zero crossings with a dead zone to be robust against noise. - * If the noise std is 2 pixel this corresponds to about 95% confidence interval. - */ - int32_t deadzone = (kZeroCrossingDeadzone << kMeanValueScaling); // Q4 - int32_t meanOfBuffer = 0; // Mean value of mean value buffer - int32_t numZeros = 0; // Number of zeros that cross the deadzone - int32_t cntState = 0; // State variable for zero crossing regions - int32_t cntStateOld = 0; // Previous state variable for zero crossing regions + /* Sanity check for mean_buffer_length_ */ + if (mean_buffer_length_ < 2) { + /* Not possible to estimate frequency */ + return(2); + } + // Count zero crossings with a dead zone to be robust against noise. If the + // noise std is 2 pixel this corresponds to about 95% confidence interval. + int32_t deadzone = (kZeroCrossingDeadzone << kmean_valueScaling); // Q4 + int32_t meanOfBuffer = 0; // Mean value of mean value buffer. + int32_t numZeros = 0; // Number of zeros that cross the dead-zone. + int32_t cntState = 0; // State variable for zero crossing regions. + int32_t cntStateOld = 0; // Previous state for zero crossing regions. - for (i = 0; i < _meanBufferLength; i++) - { - meanOfBuffer += _meanBuffer[i]; - } - meanOfBuffer += (_meanBufferLength >> 1); // Rounding, not truncation - meanOfBuffer /= _meanBufferLength; + for (i = 0; i < mean_buffer_length_; i++) { + meanOfBuffer += mean_buffer_[i]; + } + meanOfBuffer += (mean_buffer_length_ >> 1); // Rounding, not truncation. + meanOfBuffer /= mean_buffer_length_; - /* Count zero crossings */ - cntStateOld = (_meanBuffer[0] >= (meanOfBuffer + deadzone)); - cntStateOld -= (_meanBuffer[0] <= (meanOfBuffer - deadzone)); - for (i = 1; i < _meanBufferLength; i++) - { - cntState = (_meanBuffer[i] >= (meanOfBuffer + deadzone)); - cntState -= (_meanBuffer[i] <= (meanOfBuffer - deadzone)); - if (cntStateOld == 0) - { - cntStateOld = -cntState; - } - if (((cntState + cntStateOld) == 0) && (cntState != 0)) - { - numZeros++; - cntStateOld = cntState; - } + // Count zero crossings. + cntStateOld = (mean_buffer_[0] >= (meanOfBuffer + deadzone)); + cntStateOld -= (mean_buffer_[0] <= (meanOfBuffer - deadzone)); + for (i = 1; i < mean_buffer_length_; i++) { + cntState = (mean_buffer_[i] >= (meanOfBuffer + deadzone)); + cntState -= (mean_buffer_[i] <= (meanOfBuffer - deadzone)); + if (cntStateOld == 0) { + cntStateOld = -cntState; } - /* END count zero crossings */ + if (((cntState + cntStateOld) == 0) && (cntState != 0)) { + numZeros++; + cntStateOld = cntState; + } + } + // END count zero crossings. 
- /* Frequency estimation according to: - * freqEst = numZeros * frameRate / 2 / _meanBufferLength; - * - * Resolution is set to Q4 - */ - freqEst = ((numZeros * 90000) << 3); - freqEst /= (_timestampBuffer[0] - _timestampBuffer[_meanBufferLength - 1]); + /* Frequency estimation according to: + * freqEst = numZeros * frame_rate / 2 / mean_buffer_length_; + * + * Resolution is set to Q4 + */ + freqEst = ((numZeros * 90000) << 3); + freqEst /= + (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]); - /* Translate frequency estimate to regions close to 100 and 120 Hz */ - uint8_t freqState = 0; // Current translation state; - // (0) Not in interval, - // (1) Within valid interval, - // (2) Out of range - int32_t freqAlias = freqEst; - if (freqEst > kMinFrequencyToDetect) - { - uint8_t aliasState = 1; - while(freqState == 0) - { - /* Increase frequency */ - freqAlias += (aliasState * _frameRate); - freqAlias += ((freqEst << 1) * (1 - (aliasState << 1))); - /* Compute state */ - freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation); - freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation); - freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation)); - /* Switch alias state */ - aliasState++; - aliasState &= 0x01; - } + /* Translate frequency estimate to regions close to 100 and 120 Hz */ + uint8_t freqState = 0; // Current translation state; + // (0) Not in interval, + // (1) Within valid interval, + // (2) Out of range + int32_t freqAlias = freqEst; + if (freqEst > kMinFrequencyToDetect) { + uint8_t aliasState = 1; + while(freqState == 0) { + /* Increase frequency */ + freqAlias += (aliasState * frame_rate_); + freqAlias += ((freqEst << 1) * (1 - (aliasState << 1))); + /* Compute state */ + freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation); + freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation); + freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation)); + /* Switch alias state */ + aliasState++; + aliasState &= 0x01; } - /* Is frequency estimate within detection region? */ - if (freqState == 1) - { - retVal = 1; - }else if (freqState == 0) - { - retVal = 2; - }else - { - retVal = 0; - } - return retVal; + } + /* Is frequency estimate within detection region? */ + if (freqState == 1) { + ret_val = 1; + } else if (freqState == 0) { + ret_val = 2; + } else { + ret_val = 0; + } + return ret_val; } -} // namespace +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.h index d3f8139bef79..53ffebed5fe0 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/deflickering.h @@ -8,12 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
 */

-/*
- * deflickering.h
- */
-
-#ifndef VPM_DEFLICKERING_H
-#define VPM_DEFLICKERING_H
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DEFLICKERING_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DEFLICKERING_H_

 #include <string.h>  // NULL

@@ -22,44 +18,43 @@ namespace webrtc {

-class VPMDeflickering
-{
-public:
-    VPMDeflickering();
-    ~VPMDeflickering();
+class VPMDeflickering {
+ public:
+  VPMDeflickering();
+  ~VPMDeflickering();

-    int32_t ChangeUniqueId(int32_t id);
+  int32_t ChangeUniqueId(int32_t id);

-    void Reset();
+  void Reset();
+  int32_t ProcessFrame(I420VideoFrame* frame,
+                       VideoProcessingModule::FrameStats* stats);

-    int32_t ProcessFrame(I420VideoFrame* frame,
-                         VideoProcessingModule::FrameStats* stats);
-private:
-    int32_t PreDetection(uint32_t timestamp,
-                         const VideoProcessingModule::FrameStats& stats);
+ private:
+  int32_t PreDetection(uint32_t timestamp,
+                       const VideoProcessingModule::FrameStats& stats);

-    int32_t DetectFlicker();
+  int32_t DetectFlicker();

-    enum { kMeanBufferLength = 32 };
-    enum { kFrameHistorySize = 15 };
-    enum { kNumProbs = 12 };
-    enum { kNumQuants = kNumProbs + 2 };
-    enum { kMaxOnlyLength = 5 };
+  enum { kMeanBufferLength = 32 };
+  enum { kFrameHistory_size = 15 };
+  enum { kNumProbs = 12 };
+  enum { kNumQuants = kNumProbs + 2 };
+  enum { kMaxOnlyLength = 5 };

-    int32_t _id;
+  int32_t id_;

-    uint32_t _meanBufferLength;
-    uint8_t _detectionState; // 0: No flickering
-                             // 1: Flickering detected
-                             // 2: In flickering
-    int32_t _meanBuffer[kMeanBufferLength];
-    uint32_t _timestampBuffer[kMeanBufferLength];
-    uint32_t _frameRate;
-    static const uint16_t _probUW16[kNumProbs];
-    static const uint16_t _weightUW16[kNumQuants - kMaxOnlyLength];
-    uint8_t _quantHistUW8[kFrameHistorySize][kNumQuants];
+  uint32_t mean_buffer_length_;
+  uint8_t detection_state_;  // 0: No flickering
+                             // 1: Flickering detected
+                             // 2: In flickering
+  int32_t mean_buffer_[kMeanBufferLength];
+  uint32_t timestamp_buffer_[kMeanBufferLength];
+  uint32_t frame_rate_;
+  static const uint16_t prob_uw16_[kNumProbs];
+  static const uint16_t weight_uw16_[kNumQuants - kMaxOnlyLength];
+  uint8_t quant_hist_uw8_[kFrameHistory_size][kNumQuants];
 };

-} // namespace
+}  // namespace webrtc

-#endif // VPM_DEFLICKERING_H
+#endif  // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DEFLICKERING_H_
diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.cc
index 45326fa54581..79c4bcc3d1bc 100644
--- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.cc
@@ -14,167 +14,146 @@ #include <string.h>

 namespace webrtc {
+// Down-sampling in time (unit: number of frames)
+enum { kSubsamplingTime = 0 };
+// Sub-sampling in width (unit: power of 2)
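+// (A value of 0 disables sub-sampling; a value of k updates the variance
+// estimate only once per 2^k pixels in that dimension.)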
+enum { kSubsamplingWidth = 0 }; +// Sub-sampling in height (unit: power of 2) +enum { kSubsamplingHeight = 0 }; +// (Q8) De-noising filter parameter +enum { kDenoiseFiltParam = 179 }; +// (Q8) 1 - filter parameter +enum { kDenoiseFiltParamRec = 77 }; +// (Q8) De-noising threshold level +enum { kDenoiseThreshold = 19200 }; -enum { kSubsamplingTime = 0 }; // Down-sampling in time (unit: number of frames) -enum { kSubsamplingWidth = 0 }; // Sub-sampling in width (unit: power of 2) -enum { kSubsamplingHeight = 0 }; // Sub-sampling in height (unit: power of 2) -enum { kDenoiseFiltParam = 179 }; // (Q8) De-noising filter parameter -enum { kDenoiseFiltParamRec = 77 }; // (Q8) 1 - filter parameter -enum { kDenoiseThreshold = 19200 }; // (Q8) De-noising threshold level - -VPMDenoising::VPMDenoising() : - _id(0), - _moment1(NULL), - _moment2(NULL) -{ - Reset(); +VPMDenoising::VPMDenoising() + : id_(0), + moment1_(NULL), + moment2_(NULL) { + Reset(); } -VPMDenoising::~VPMDenoising() -{ - if (_moment1) - { - delete [] _moment1; - _moment1 = NULL; - } - - if (_moment2) - { - delete [] _moment2; - _moment2 = NULL; - } +VPMDenoising::~VPMDenoising() { + if (moment1_) { + delete [] moment1_; + moment1_ = NULL; } -int32_t -VPMDenoising::ChangeUniqueId(const int32_t id) -{ - _id = id; - return VPM_OK; + if (moment2_) { + delete [] moment2_; + moment2_ = NULL; + } } -void -VPMDenoising::Reset() -{ - _frameSize = 0; - _denoiseFrameCnt = 0; - - if (_moment1) - { - delete [] _moment1; - _moment1 = NULL; - } - - if (_moment2) - { - delete [] _moment2; - _moment2 = NULL; - } +int32_t VPMDenoising::ChangeUniqueId(const int32_t id) { + id_ = id; + return VPM_OK; } -int32_t -VPMDenoising::ProcessFrame(I420VideoFrame* frame) -{ - assert(frame); - int32_t thevar; - int k; - int jsub, ksub; - int32_t diff0; - uint32_t tmpMoment1; - uint32_t tmpMoment2; - uint32_t tmp; - int32_t numPixelsChanged = 0; +void VPMDenoising::Reset() { + frame_size_ = 0; + denoise_frame_cnt_ = 0; - if (frame->IsZeroSize()) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, - "zero size frame"); - return VPM_GENERAL_ERROR; + if (moment1_) { + delete [] moment1_; + moment1_ = NULL; + } + + if (moment2_) { + delete [] moment2_; + moment2_ = NULL; + } +} + +int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) { + assert(frame); + int32_t thevar; + int k; + int jsub, ksub; + int32_t diff0; + uint32_t tmp_moment1; + uint32_t tmp_moment2; + uint32_t tmp; + int32_t num_pixels_changed = 0; + + if (frame->IsZeroSize()) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, + "zero size frame"); + return VPM_GENERAL_ERROR; + } + + int width = frame->width(); + int height = frame->height(); + + /* Size of luminance component */ + const uint32_t y_size = height * width; + + /* Initialization */ + if (y_size != frame_size_) { + delete [] moment1_; + moment1_ = NULL; + + delete [] moment2_; + moment2_ = NULL; + } + frame_size_ = y_size; + + if (!moment1_) { + moment1_ = new uint32_t[y_size]; + memset(moment1_, 0, sizeof(uint32_t)*y_size); + } + + if (!moment2_) { + moment2_ = new uint32_t[y_size]; + memset(moment2_, 0, sizeof(uint32_t)*y_size); + } + + /* Apply de-noising on each pixel, but update variance sub-sampled */ + uint8_t* buffer = frame->buffer(kYPlane); + for (int i = 0; i < height; i++) { // Collect over height + k = i * width; + ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width; + for (int j = 0; j < width; j++) { // Collect over width + jsub = ((j >> kSubsamplingWidth) << 
kSubsamplingWidth); + /* Update mean value for every pixel and every frame */ + tmp_moment1 = moment1_[k + j]; + tmp_moment1 *= kDenoiseFiltParam; // Q16 + tmp_moment1 += ((kDenoiseFiltParamRec * ((uint32_t)buffer[k + j])) << 8); + tmp_moment1 >>= 8; // Q8 + moment1_[k + j] = tmp_moment1; + + tmp_moment2 = moment2_[ksub + jsub]; + if ((ksub == k) && (jsub == j) && (denoise_frame_cnt_ == 0)) { + tmp = ((uint32_t)buffer[k + j] * + (uint32_t)buffer[k + j]); + tmp_moment2 *= kDenoiseFiltParam; // Q16 + tmp_moment2 += ((kDenoiseFiltParamRec * tmp) << 8); + tmp_moment2 >>= 8; // Q8 + } + moment2_[k + j] = tmp_moment2; + /* Current event = deviation from mean value */ + diff0 = ((int32_t)buffer[k + j] << 8) - moment1_[k + j]; + /* Recent events = variance (variations over time) */ + thevar = moment2_[k + j]; + thevar -= ((moment1_[k + j] * moment1_[k + j]) >> 8); + // De-noising criteria, i.e., when should we replace a pixel by its mean. + // 1) recent events are minor. + // 2) current events are minor. + if ((thevar < kDenoiseThreshold) + && ((diff0 * diff0 >> 8) < kDenoiseThreshold)) { + // Replace with mean. + buffer[k + j] = (uint8_t)(moment1_[k + j] >> 8); + num_pixels_changed++; + } } + } - int width = frame->width(); - int height = frame->height(); + denoise_frame_cnt_++; + if (denoise_frame_cnt_ > kSubsamplingTime) + denoise_frame_cnt_ = 0; - /* Size of luminance component */ - const uint32_t ysize = height * width; - - /* Initialization */ - if (ysize != _frameSize) - { - delete [] _moment1; - _moment1 = NULL; - - delete [] _moment2; - _moment2 = NULL; - } - _frameSize = ysize; - - if (!_moment1) - { - _moment1 = new uint32_t[ysize]; - memset(_moment1, 0, sizeof(uint32_t)*ysize); - } - - if (!_moment2) - { - _moment2 = new uint32_t[ysize]; - memset(_moment2, 0, sizeof(uint32_t)*ysize); - } - - /* Apply de-noising on each pixel, but update variance sub-sampled */ - uint8_t* buffer = frame->buffer(kYPlane); - for (int i = 0; i < height; i++) - { // Collect over height - k = i * width; - ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width; - for (int j = 0; j < width; j++) - { // Collect over width - jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth); - /* Update mean value for every pixel and every frame */ - tmpMoment1 = _moment1[k + j]; - tmpMoment1 *= kDenoiseFiltParam; // Q16 - tmpMoment1 += ((kDenoiseFiltParamRec * - ((uint32_t)buffer[k + j])) << 8); - tmpMoment1 >>= 8; // Q8 - _moment1[k + j] = tmpMoment1; - - tmpMoment2 = _moment2[ksub + jsub]; - if ((ksub == k) && (jsub == j) && (_denoiseFrameCnt == 0)) - { - tmp = ((uint32_t)buffer[k + j] * - (uint32_t)buffer[k + j]); - tmpMoment2 *= kDenoiseFiltParam; // Q16 - tmpMoment2 += ((kDenoiseFiltParamRec * tmp)<<8); - tmpMoment2 >>= 8; // Q8 - } - _moment2[k + j] = tmpMoment2; - /* Current event = deviation from mean value */ - diff0 = ((int32_t)buffer[k + j] << 8) - _moment1[k + j]; - /* Recent events = variance (variations over time) */ - thevar = _moment2[k + j]; - thevar -= ((_moment1[k + j] * _moment1[k + j]) >> 8); - /*************************************************************************** - * De-noising criteria, i.e., when should we replace a pixel by its mean - * - * 1) recent events are minor - * 2) current events are minor - ***************************************************************************/ - if ((thevar < kDenoiseThreshold) - && ((diff0 * diff0 >> 8) < kDenoiseThreshold)) - { // Replace with mean - buffer[k + j] = (uint8_t)(_moment1[k + j] >> 8); - numPixelsChanged++; - } - } - } - - /* Update 
frame counter */ - _denoiseFrameCnt++; - if (_denoiseFrameCnt > kSubsamplingTime) - { - _denoiseFrameCnt = 0; - } - - return numPixelsChanged; + return num_pixels_changed; } } // namespace diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.h index b9d09c0817f2..60645fbdbf88 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/denoising.h @@ -8,39 +8,35 @@ * be found in the AUTHORS file in the root of the source tree. */ -/* - * denoising.h - */ -#ifndef VPM_DENOISING_H -#define VPM_DENOISING_H +#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_ +#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_ #include "webrtc/modules/video_processing/main/interface/video_processing.h" #include "webrtc/typedefs.h" namespace webrtc { -class VPMDenoising -{ -public: - VPMDenoising(); - ~VPMDenoising(); +class VPMDenoising { + public: + VPMDenoising(); + ~VPMDenoising(); - int32_t ChangeUniqueId(int32_t id); + int32_t ChangeUniqueId(int32_t id); - void Reset(); + void Reset(); - int32_t ProcessFrame(I420VideoFrame* frame); + int32_t ProcessFrame(I420VideoFrame* frame); -private: - int32_t _id; + private: + int32_t id_; - uint32_t* _moment1; // (Q8) First order moment (mean) - uint32_t* _moment2; // (Q8) Second order moment - uint32_t _frameSize; // Size (# of pixels) of frame - int _denoiseFrameCnt; // Counter for subsampling in time + uint32_t* moment1_; // (Q8) First order moment (mean). + uint32_t* moment2_; // (Q8) Second order moment. + uint32_t frame_size_; // Size (# of pixels) of frame. + int denoise_frame_cnt_; // Counter for subsampling in time. 
}; -} // namespace +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_ -#endif // VPM_DENOISING_H - diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.cc index fc952c0b821f..de4907029bcd 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.cc @@ -13,177 +13,137 @@ namespace webrtc { -VPMFramePreprocessor::VPMFramePreprocessor(): -_id(0), -_contentMetrics(NULL), -_maxFrameRate(0), -_resampledFrame(), -_enableCA(false), -_frameCnt(0) -{ - _spatialResampler = new VPMSimpleSpatialResampler(); - _ca = new VPMContentAnalysis(true); - _vd = new VPMVideoDecimator(); +VPMFramePreprocessor::VPMFramePreprocessor() + : id_(0), + content_metrics_(NULL), + max_frame_rate_(0), + resampled_frame_(), + enable_ca_(false), + frame_cnt_(0) { + spatial_resampler_ = new VPMSimpleSpatialResampler(); + ca_ = new VPMContentAnalysis(true); + vd_ = new VPMVideoDecimator(); } -VPMFramePreprocessor::~VPMFramePreprocessor() -{ - Reset(); - delete _spatialResampler; - delete _ca; - delete _vd; +VPMFramePreprocessor::~VPMFramePreprocessor() { + Reset(); + delete spatial_resampler_; + delete ca_; + delete vd_; } -int32_t -VPMFramePreprocessor::ChangeUniqueId(const int32_t id) -{ - _id = id; - return VPM_OK; +int32_t VPMFramePreprocessor::ChangeUniqueId(const int32_t id) { + id_ = id; + return VPM_OK; } -void -VPMFramePreprocessor::Reset() -{ - _ca->Release(); - _vd->Reset(); - _contentMetrics = NULL; - _spatialResampler->Reset(); - _enableCA = false; - _frameCnt = 0; -} - - -void -VPMFramePreprocessor::EnableTemporalDecimation(bool enable) -{ - _vd->EnableTemporalDecimation(enable); -} -void -VPMFramePreprocessor::EnableContentAnalysis(bool enable) -{ - _enableCA = enable; +void VPMFramePreprocessor::Reset() { + ca_->Release(); + vd_->Reset(); + content_metrics_ = NULL; + spatial_resampler_->Reset(); + enable_ca_ = false; + frame_cnt_ = 0; } -void -VPMFramePreprocessor::SetInputFrameResampleMode(VideoFrameResampling resamplingMode) -{ - _spatialResampler->SetInputFrameResampleMode(resamplingMode); + +void VPMFramePreprocessor::EnableTemporalDecimation(bool enable) { + vd_->EnableTemporalDecimation(enable); } - -int32_t -VPMFramePreprocessor::SetMaxFrameRate(uint32_t maxFrameRate) -{ - if (maxFrameRate == 0) - { - return VPM_PARAMETER_ERROR; +void VPMFramePreprocessor::EnableContentAnalysis(bool enable) { + enable_ca_ = enable; +} + +void VPMFramePreprocessor::SetInputFrameResampleMode( + VideoFrameResampling resampling_mode) { + spatial_resampler_->SetInputFrameResampleMode(resampling_mode); +} + +int32_t VPMFramePreprocessor::SetMaxFramerate(uint32_t max_frame_rate) { + if (max_frame_rate == 0) return VPM_PARAMETER_ERROR; + + // Max allowed frame_rate. 
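+  // (Any target frame rate requested later is clamped to this value by the
+  // decimator.)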
+ max_frame_rate_ = max_frame_rate; + return vd_->SetMaxFramerate(max_frame_rate); +} + +int32_t VPMFramePreprocessor::SetTargetResolution( + uint32_t width, uint32_t height, uint32_t frame_rate) { + if ( (width == 0) || (height == 0) || (frame_rate == 0)) { + return VPM_PARAMETER_ERROR; + } + int32_t ret_val = 0; + ret_val = spatial_resampler_->SetTargetFrameSize(width, height); + + if (ret_val < 0) return ret_val; + + ret_val = vd_->SetTargetframe_rate(frame_rate); + if (ret_val < 0) return ret_val; + + return VPM_OK; +} + +void VPMFramePreprocessor::UpdateIncomingframe_rate() { + vd_->UpdateIncomingframe_rate(); +} + +uint32_t VPMFramePreprocessor::Decimatedframe_rate() { + return vd_->Decimatedframe_rate(); +} + + +uint32_t VPMFramePreprocessor::DecimatedWidth() const { + return spatial_resampler_->TargetWidth(); +} + + +uint32_t VPMFramePreprocessor::DecimatedHeight() const { + return spatial_resampler_->TargetHeight(); +} + + +int32_t VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processed_frame) { + if (frame.IsZeroSize()) { + return VPM_PARAMETER_ERROR; + } + + vd_->UpdateIncomingframe_rate(); + + if (vd_->DropFrame()) { + WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, id_, + "Drop frame due to frame rate"); + return 1; // drop 1 frame + } + + // Resizing incoming frame if needed. Otherwise, remains NULL. + // We are not allowed to resample the input frame (must make a copy of it). + *processed_frame = NULL; + if (spatial_resampler_->ApplyResample(frame.width(), frame.height())) { + int32_t ret = spatial_resampler_->ResampleFrame(frame, &resampled_frame_); + if (ret != VPM_OK) return ret; + *processed_frame = &resampled_frame_; + } + + // Perform content analysis on the frame to be encoded. + if (enable_ca_) { + // Compute new metrics every |kSkipFramesCA| frames, starting with + // the first frame. + if (frame_cnt_ % kSkipFrameCA == 0) { + if (*processed_frame == NULL) { + content_metrics_ = ca_->ComputeContentMetrics(frame); + } else { + content_metrics_ = ca_->ComputeContentMetrics(resampled_frame_); + } } - //Max allowed frame rate - _maxFrameRate = maxFrameRate; - - return _vd->SetMaxFrameRate(maxFrameRate); -} - - -int32_t -VPMFramePreprocessor::SetTargetResolution(uint32_t width, uint32_t height, uint32_t frameRate) -{ - if ( (width == 0) || (height == 0) || (frameRate == 0)) - { - return VPM_PARAMETER_ERROR; - } - int32_t retVal = 0; - retVal = _spatialResampler->SetTargetFrameSize(width, height); - if (retVal < 0) - { - return retVal; - } - retVal = _vd->SetTargetFrameRate(frameRate); - if (retVal < 0) - { - return retVal; - } - - return VPM_OK; + ++frame_cnt_; + } + return VPM_OK; } -void -VPMFramePreprocessor::UpdateIncomingFrameRate() -{ - _vd->UpdateIncomingFrameRate(); -} - -uint32_t -VPMFramePreprocessor::DecimatedFrameRate() -{ - return _vd->DecimatedFrameRate(); -} - - -uint32_t -VPMFramePreprocessor::DecimatedWidth() const -{ - return _spatialResampler->TargetWidth(); -} - - -uint32_t -VPMFramePreprocessor::DecimatedHeight() const -{ - return _spatialResampler->TargetHeight(); -} - - -int32_t -VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame, - I420VideoFrame** processedFrame) -{ - if (frame.IsZeroSize()) - { - return VPM_PARAMETER_ERROR; - } - - _vd->UpdateIncomingFrameRate(); - - if (_vd->DropFrame()) - { - WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id, - "Drop frame due to frame rate"); - return 1; // drop 1 frame - } - - // Resizing incoming frame if needed. 
Otherwise, remains NULL. - // We are not allowed to resample the input frame (must make a copy of it). - *processedFrame = NULL; - if (_spatialResampler->ApplyResample(frame.width(), frame.height())) { - int32_t ret = _spatialResampler->ResampleFrame(frame, &_resampledFrame); - if (ret != VPM_OK) - return ret; - *processedFrame = &_resampledFrame; - } - - // Perform content analysis on the frame to be encoded. - if (_enableCA) - { - // Compute new metrics every |kSkipFramesCA| frames, starting with - // the first frame. - if (_frameCnt % kSkipFrameCA == 0) { - if (*processedFrame == NULL) { - _contentMetrics = _ca->ComputeContentMetrics(frame); - } else { - _contentMetrics = _ca->ComputeContentMetrics(_resampledFrame); - } - } - ++_frameCnt; - } - return VPM_OK; -} - - -VideoContentMetrics* -VPMFramePreprocessor::ContentMetrics() const -{ - return _contentMetrics; +VideoContentMetrics* VPMFramePreprocessor::ContentMetrics() const { + return content_metrics_; } } // namespace diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.h index f2d94a239804..ca62d38fc6d2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/frame_preprocessor.h @@ -11,8 +11,8 @@ /* * frame_preprocessor.h */ -#ifndef VPM_FRAME_PREPROCESSOR_H -#define VPM_FRAME_PREPROCESSOR_H +#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_FRAME_PREPROCESSOR_H +#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_FRAME_PREPROCESSOR_H #include "webrtc/modules/video_processing/main/interface/video_processing.h" #include "webrtc/modules/video_processing/main/source/content_analysis.h" @@ -22,65 +22,62 @@ namespace webrtc { +class VPMFramePreprocessor { + public: + VPMFramePreprocessor(); + ~VPMFramePreprocessor(); -class VPMFramePreprocessor -{ -public: + int32_t ChangeUniqueId(const int32_t id); - VPMFramePreprocessor(); - ~VPMFramePreprocessor(); + void Reset(); - int32_t ChangeUniqueId(const int32_t id); + // Enable temporal decimation. + void EnableTemporalDecimation(bool enable); - void Reset(); + void SetInputFrameResampleMode(VideoFrameResampling resampling_mode); - // Enable temporal decimation - void EnableTemporalDecimation(bool enable); + // Enable content analysis. + void EnableContentAnalysis(bool enable); - void SetInputFrameResampleMode(VideoFrameResampling resamplingMode); + // Set max frame rate. + int32_t SetMaxFramerate(uint32_t max_frame_rate); - //Enable content analysis - void EnableContentAnalysis(bool enable); + // Set target resolution: frame rate and dimension. + int32_t SetTargetResolution(uint32_t width, uint32_t height, + uint32_t frame_rate); - //Set max frame rate - int32_t SetMaxFrameRate(uint32_t maxFrameRate); + // Update incoming frame rate/dimension. + void UpdateIncomingframe_rate(); - //Set target resolution: frame rate and dimension - int32_t SetTargetResolution(uint32_t width, uint32_t height, - uint32_t frameRate); + int32_t updateIncomingFrameSize(uint32_t width, uint32_t height); - //Update incoming frame rate/dimension - void UpdateIncomingFrameRate(); + // Set decimated values: frame rate/dimension. 
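+  // (Despite the comment above, these are getters: the rate comes from the
+  // video decimator, the dimensions from the spatial resampler's target.)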
+ uint32_t Decimatedframe_rate(); + uint32_t DecimatedWidth() const; + uint32_t DecimatedHeight() const; - int32_t updateIncomingFrameSize(uint32_t width, uint32_t height); + // Preprocess output: + int32_t PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processed_frame); + VideoContentMetrics* ContentMetrics() const; - //Set decimated values: frame rate/dimension - uint32_t DecimatedFrameRate(); - uint32_t DecimatedWidth() const; - uint32_t DecimatedHeight() const; + private: + // The content does not change so much every frame, so to reduce complexity + // we can compute new content metrics every |kSkipFrameCA| frames. + enum { kSkipFrameCA = 2 }; - //Preprocess output: - int32_t PreprocessFrame(const I420VideoFrame& frame, - I420VideoFrame** processedFrame); - VideoContentMetrics* ContentMetrics() const; + int32_t id_; + VideoContentMetrics* content_metrics_; + uint32_t max_frame_rate_; + I420VideoFrame resampled_frame_; + VPMSpatialResampler* spatial_resampler_; + VPMContentAnalysis* ca_; + VPMVideoDecimator* vd_; + bool enable_ca_; + int frame_cnt_; -private: - // The content does not change so much every frame, so to reduce complexity - // we can compute new content metrics every |kSkipFrameCA| frames. - enum { kSkipFrameCA = 2 }; +}; - int32_t _id; - VideoContentMetrics* _contentMetrics; - uint32_t _maxFrameRate; - I420VideoFrame _resampledFrame; - VPMSpatialResampler* _spatialResampler; - VPMContentAnalysis* _ca; - VPMVideoDecimator* _vd; - bool _enableCA; - int _frameCnt; - -}; // end of VPMFramePreprocessor class definition +} // namespace webrtc -} // namespace - -#endif // VPM_FRAME_PREPROCESS_H +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_FRAME_PREPROCESSOR_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.cc index f66153da849a..fd90c8f76a4e 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.cc @@ -14,109 +14,85 @@ namespace webrtc { VPMSimpleSpatialResampler::VPMSimpleSpatialResampler() -: -_resamplingMode(kFastRescaling), -_targetWidth(0), -_targetHeight(0), -_scaler() -{ -} + : resampling_mode_(kFastRescaling), + target_width_(0), + target_height_(0), + scaler_() {} -VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler() -{ - // -} +VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler() {} -int32_t -VPMSimpleSpatialResampler::SetTargetFrameSize(int32_t width, - int32_t height) -{ - if (_resamplingMode == kNoRescaling) { - return VPM_OK; - } +int32_t VPMSimpleSpatialResampler::SetTargetFrameSize(int32_t width, + int32_t height) { + if (resampling_mode_ == kNoRescaling) return VPM_OK; - if (width < 1 || height < 1) { - return VPM_PARAMETER_ERROR; - } + if (width < 1 || height < 1) return VPM_PARAMETER_ERROR; - _targetWidth = width; - _targetHeight = height; + target_width_ = width; + target_height_ = height; return VPM_OK; } -void -VPMSimpleSpatialResampler::SetInputFrameResampleMode(VideoFrameResampling - resamplingMode) -{ - _resamplingMode = resamplingMode; +void VPMSimpleSpatialResampler::SetInputFrameResampleMode( + VideoFrameResampling resampling_mode) { + resampling_mode_ = resampling_mode; } -void -VPMSimpleSpatialResampler::Reset() -{ - _resamplingMode = kFastRescaling; - _targetWidth = 0; - _targetHeight = 0; +void VPMSimpleSpatialResampler::Reset() { + resampling_mode_ = 
kFastRescaling; + target_width_ = 0; + target_height_ = 0; } -int32_t -VPMSimpleSpatialResampler::ResampleFrame(const I420VideoFrame& inFrame, - I420VideoFrame* outFrame) -{ +int32_t VPMSimpleSpatialResampler::ResampleFrame(const I420VideoFrame& inFrame, + I420VideoFrame* outFrame) { // Don't copy if frame remains as is. - if (_resamplingMode == kNoRescaling) + if (resampling_mode_ == kNoRescaling) return VPM_OK; // Check if re-sampling is needed - else if ((inFrame.width() == _targetWidth) && - (inFrame.height() == _targetHeight)) { + else if ((inFrame.width() == target_width_) && + (inFrame.height() == target_height_)) { return VPM_OK; } // Setting scaler // TODO(mikhal/marpan): Should we allow for setting the filter mode in - // _scale.Set() with |_resamplingMode|? - int retVal = 0; - retVal = _scaler.Set(inFrame.width(), inFrame.height(), - _targetWidth, _targetHeight, kI420, kI420, kScaleBox); - if (retVal < 0) - return retVal; + // _scale.Set() with |resampling_mode_|? + int ret_val = 0; + ret_val = scaler_.Set(inFrame.width(), inFrame.height(), + target_width_, target_height_, kI420, kI420, kScaleBox); + if (ret_val < 0) + return ret_val; - retVal = _scaler.Scale(inFrame, outFrame); + ret_val = scaler_.Scale(inFrame, outFrame); // Setting time parameters to the output frame. // Timestamp will be reset in Scale call above, so we should set it after. outFrame->set_timestamp(inFrame.timestamp()); outFrame->set_render_time_ms(inFrame.render_time_ms()); - if (retVal == 0) + if (ret_val == 0) return VPM_OK; else return VPM_SCALE_ERROR; } -int32_t -VPMSimpleSpatialResampler::TargetHeight() -{ - return _targetHeight; +int32_t VPMSimpleSpatialResampler::TargetHeight() { + return target_height_; } -int32_t -VPMSimpleSpatialResampler::TargetWidth() -{ - return _targetWidth; +int32_t VPMSimpleSpatialResampler::TargetWidth() { + return target_width_; } -bool -VPMSimpleSpatialResampler::ApplyResample(int32_t width, - int32_t height) -{ - if ((width == _targetWidth && height == _targetHeight) || - _resamplingMode == kNoRescaling) +bool VPMSimpleSpatialResampler::ApplyResample(int32_t width, + int32_t height) { + if ((width == target_width_ && height == target_height_) || + resampling_mode_ == kNoRescaling) return false; else return true; } -} // namespace +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.h index 69d5cf886687..05247341d5cb 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/spatial_resampler.h @@ -8,12 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -/* - * spatial_resampler.h - */ - -#ifndef VPM_SPATIAL_RESAMPLER_H -#define VPM_SPATIAL_RESAMPLER_H +#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_SPATIAL_RESAMPLER_H +#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_SPATIAL_RESAMPLER_H #include "webrtc/typedefs.h" @@ -25,13 +21,12 @@ namespace webrtc { -class VPMSpatialResampler -{ -public: +class VPMSpatialResampler { + public: virtual ~VPMSpatialResampler() {}; virtual int32_t SetTargetFrameSize(int32_t width, int32_t height) = 0; virtual void SetInputFrameResampleMode(VideoFrameResampling - resamplingMode) = 0; + resampling_mode) = 0; virtual void Reset() = 0; virtual int32_t ResampleFrame(const I420VideoFrame& inFrame, I420VideoFrame* outFrame) = 0; @@ -40,13 +35,12 @@ public: virtual bool ApplyResample(int32_t width, int32_t height) = 0; }; -class VPMSimpleSpatialResampler : public VPMSpatialResampler -{ -public: +class VPMSimpleSpatialResampler : public VPMSpatialResampler { + public: VPMSimpleSpatialResampler(); ~VPMSimpleSpatialResampler(); virtual int32_t SetTargetFrameSize(int32_t width, int32_t height); - virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode); + virtual void SetInputFrameResampleMode(VideoFrameResampling resampling_mode); virtual void Reset(); virtual int32_t ResampleFrame(const I420VideoFrame& inFrame, I420VideoFrame* outFrame); @@ -54,14 +48,14 @@ public: virtual int32_t TargetHeight(); virtual bool ApplyResample(int32_t width, int32_t height); -private: + private: - VideoFrameResampling _resamplingMode; - int32_t _targetWidth; - int32_t _targetHeight; - Scaler _scaler; + VideoFrameResampling resampling_mode_; + int32_t target_width_; + int32_t target_height_; + Scaler scaler_; }; -} // namespace +} // namespace webrtc -#endif +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_SPATIAL_RESAMPLER_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.cc index 84a6cad15e9a..8fd3d036919b 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.cc @@ -17,219 +17,156 @@ namespace webrtc { VPMVideoDecimator::VPMVideoDecimator() -: -_overShootModifier(0), -_dropCount(0), -_keepCount(0), -_targetFrameRate(30), -_incomingFrameRate(0.0f), -_maxFrameRate(30), -_incomingFrameTimes(), -_enableTemporalDecimation(true) -{ - Reset(); + : overshoot_modifier_(0), + drop_count_(0), + keep_count_(0), + target_frame_rate_(30), + incoming_frame_rate_(0.0f), + max_frame_rate_(30), + incoming_frame_times_(), + enable_temporal_decimation_(true) { + Reset(); } -VPMVideoDecimator::~VPMVideoDecimator() -{ - // +VPMVideoDecimator::~VPMVideoDecimator() {} + +void VPMVideoDecimator::Reset() { + overshoot_modifier_ = 0; + drop_count_ = 0; + keep_count_ = 0; + target_frame_rate_ = 30; + incoming_frame_rate_ = 0.0f; + max_frame_rate_ = 30; + memset(incoming_frame_times_, 0, sizeof(incoming_frame_times_)); + enable_temporal_decimation_ = true; } -void -VPMVideoDecimator::Reset() -{ - _overShootModifier = 0; - _dropCount = 0; - _keepCount = 0; - _targetFrameRate = 30; - _incomingFrameRate = 0.0f; - _maxFrameRate = 30; - memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes)); - _enableTemporalDecimation = true; +void VPMVideoDecimator::EnableTemporalDecimation(bool enable) { + enable_temporal_decimation_ = enable; } -void 
-VPMVideoDecimator::EnableTemporalDecimation(bool enable)
-{
-    _enableTemporalDecimation = enable;
-}
-int32_t
-VPMVideoDecimator::SetMaxFrameRate(uint32_t maxFrameRate)
-{
-    if (maxFrameRate == 0)
-    {
-        return VPM_PARAMETER_ERROR;
-    }
+int32_t VPMVideoDecimator::SetMaxFramerate(uint32_t max_frame_rate) {
+  if (max_frame_rate == 0) return VPM_PARAMETER_ERROR;

-    _maxFrameRate = maxFrameRate;
-
-    if (_targetFrameRate > _maxFrameRate)
-    {
-        _targetFrameRate = _maxFrameRate;
+  max_frame_rate_ = max_frame_rate;

-    }
-    return VPM_OK;
+  if (target_frame_rate_ > max_frame_rate_)
+    target_frame_rate_ = max_frame_rate_;
+
+  return VPM_OK;
 }

-int32_t
-VPMVideoDecimator::SetTargetFrameRate(uint32_t frameRate)
-{
-    if (frameRate == 0)
-    {
-        return VPM_PARAMETER_ERROR;
-    }
-    if (frameRate > _maxFrameRate)
-    {
-        //override
-        _targetFrameRate = _maxFrameRate;
-    }
-    else
-    {
-        _targetFrameRate = frameRate;
-    }
-    return VPM_OK;
+int32_t VPMVideoDecimator::SetTargetframe_rate(uint32_t frame_rate) {
+  if (frame_rate == 0) return VPM_PARAMETER_ERROR;
+
+  if (frame_rate > max_frame_rate_) {
+    // Override.
+    target_frame_rate_ = max_frame_rate_;
+  } else {
+    target_frame_rate_ = frame_rate;
+  }
+  return VPM_OK;
 }

-bool
-VPMVideoDecimator::DropFrame()
-{
-    if (!_enableTemporalDecimation)
-    {
-        return false;
+bool VPMVideoDecimator::DropFrame() {
+  if (!enable_temporal_decimation_) return false;
+
+  if (incoming_frame_rate_ <= 0) return false;
+
+  const uint32_t incomingframe_rate =
+      static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
+
+  if (target_frame_rate_ == 0) return true;
+
+  bool drop = false;
+  if (incomingframe_rate > target_frame_rate_) {
+    int32_t overshoot =
+        overshoot_modifier_ + (incomingframe_rate - target_frame_rate_);
+    if (overshoot < 0) {
+      overshoot = 0;
+      overshoot_modifier_ = 0;
     }
-    if (_incomingFrameRate <= 0)
-    {
-        return false;
+    if (overshoot && 2 * overshoot < (int32_t) incomingframe_rate) {
+      if (drop_count_) {  // Just got here so drop to be sure.
+        drop_count_ = 0;
+        return true;
+      }
+      const uint32_t dropVar = incomingframe_rate / overshoot;
+
+      if (keep_count_ >= dropVar) {
+        drop = true;
+        overshoot_modifier_ = -((int32_t) incomingframe_rate % overshoot) / 3;
+        keep_count_ = 1;
+      } else {
+        keep_count_++;
+      }
+    } else {
+      keep_count_ = 0;
+      const uint32_t dropVar = overshoot / target_frame_rate_;
+      if (drop_count_ < dropVar) {
+        drop = true;
+        drop_count_++;
+      } else {
+        overshoot_modifier_ = overshoot % target_frame_rate_;
+        drop = false;
+        drop_count_ = 0;
+      }
     }
-
-    const uint32_t incomingFrameRate = static_cast<uint32_t>(_incomingFrameRate + 0.5f);
-
-    if (_targetFrameRate == 0)
-    {
-        return true;
-    }
-
-    bool drop = false;
-    if (incomingFrameRate > _targetFrameRate)
-    {
-        int32_t overshoot = _overShootModifier + (incomingFrameRate - _targetFrameRate);
-        if(overshoot < 0)
-        {
-            overshoot = 0;
-            _overShootModifier = 0;
-        }
-
-        if (overshoot && 2 * overshoot < (int32_t) incomingFrameRate)
-        {
-
-            if (_dropCount) // Just got here so drop to be sure.
-            {
-                _dropCount = 0;
-                return true;
-            }
-            const uint32_t dropVar = incomingFrameRate / overshoot;
-
-            if (_keepCount >= dropVar)
-            {
-                drop = true;
-                _overShootModifier = -((int32_t) incomingFrameRate % overshoot) / 3;
-                _keepCount = 1;
-            }
-            else
-            {
-
-                _keepCount++;
-            }
-        }
-        else
-        {
-            _keepCount = 0;
-            const uint32_t dropVar = overshoot / _targetFrameRate;
-            if (_dropCount < dropVar)
-            {
-                drop = true;
-                _dropCount++;
-            }
-            else
-            {
-                _overShootModifier = overshoot % _targetFrameRate;
-                drop = false;
-                _dropCount = 0;
-            }
-        }
-    }
-
-    return drop;
+  }
+  return drop;
 }

-uint32_t
-VPMVideoDecimator::DecimatedFrameRate()
-{
-    ProcessIncomingFrameRate(TickTime::MillisecondTimestamp());
-    if (!_enableTemporalDecimation)
-    {
-        return static_cast<uint32_t>(_incomingFrameRate + 0.5f);
-    }
-    return VD_MIN(_targetFrameRate, static_cast<uint32_t>(_incomingFrameRate + 0.5f));
+uint32_t VPMVideoDecimator::Decimatedframe_rate() {
+  ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+  if (!enable_temporal_decimation_) {
+    return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
+  }
+  return VD_MIN(target_frame_rate_,
+                static_cast<uint32_t>(incoming_frame_rate_ + 0.5f));
 }

-uint32_t
-VPMVideoDecimator::InputFrameRate()
-{
-    ProcessIncomingFrameRate(TickTime::MillisecondTimestamp());
-    return static_cast<uint32_t>(_incomingFrameRate + 0.5f);
+uint32_t VPMVideoDecimator::Inputframe_rate() {
+  ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+  return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
 }

-void
-VPMVideoDecimator::UpdateIncomingFrameRate()
-{
-    int64_t now = TickTime::MillisecondTimestamp();
-    if(_incomingFrameTimes[0] == 0)
-    {
-        // first no shift
-    } else
-    {
-        // shift
-        for(int i = (kFrameCountHistorySize - 2); i >= 0 ; i--)
-        {
-            _incomingFrameTimes[i+1] = _incomingFrameTimes[i];
-        }
+void VPMVideoDecimator::UpdateIncomingframe_rate() {
+  int64_t now = TickTime::MillisecondTimestamp();
+  if (incoming_frame_times_[0] == 0) {
+    // First no shift.
+  } else {
+    // Shift.
+    for (int i = kFrameCountHistory_size - 2; i >= 0; i--) {
+      incoming_frame_times_[i+1] = incoming_frame_times_[i];
     }
-    _incomingFrameTimes[0] = now;
-    ProcessIncomingFrameRate(now);
+  }
+  incoming_frame_times_[0] = now;
+  ProcessIncomingframe_rate(now);
 }

-void
-VPMVideoDecimator::ProcessIncomingFrameRate(int64_t now)
-{
-    int32_t num = 0;
-    int32_t nrOfFrames = 0;
-    for(num = 1; num < (kFrameCountHistorySize - 1); num++)
-    {
-        if (_incomingFrameTimes[num] <= 0 ||
-            now - _incomingFrameTimes[num] > kFrameHistoryWindowMs) // don't use data older than 2sec
-        {
-            break;
-        } else
-        {
-            nrOfFrames++;
-        }
+void VPMVideoDecimator::ProcessIncomingframe_rate(int64_t now) {
+  int32_t num = 0;
+  int32_t nrOfFrames = 0;
+  for (num = 1; num < (kFrameCountHistory_size - 1); num++) {
+    // Don't use data older than 2sec.
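+    // (The cutoff is kFrameHistoryWindowMs = 2000, defined in
+    // video_decimator.h.)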
+    if (incoming_frame_times_[num] <= 0 ||
+        now - incoming_frame_times_[num] > kFrameHistoryWindowMs) {
+      break;
+    } else {
+      nrOfFrames++;
     }
   }
-    if (num > 1)
-    {
-        int64_t diff = now - _incomingFrameTimes[num-1];
-        _incomingFrameRate = 1.0;
-        if(diff >0)
-        {
-            _incomingFrameRate = nrOfFrames * 1000.0f / static_cast<float>(diff);
-        }
-    }
-    else
-    {
-        _incomingFrameRate = static_cast<float>(nrOfFrames);
+  }
+  if (num > 1) {
+    int64_t diff = now - incoming_frame_times_[num-1];
+    incoming_frame_rate_ = 1.0;
+    if (diff > 0) {
+      incoming_frame_rate_ = nrOfFrames * 1000.0f / static_cast<float>(diff);
     }
+  } else {
+    incoming_frame_rate_ = static_cast<float>(nrOfFrames);
+  }
 }

-} // namespace
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.h
index 0a6a14085c1a..d17da618802e 100644
--- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.h
+++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_decimator.h
@@ -8,58 +8,53 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-/*
- * video_decimator.h
- */
-#ifndef VPM_VIDEO_DECIMATOR_H
-#define VPM_VIDEO_DECIMATOR_H
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_VIDEO_DECIMATOR_H
+#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_VIDEO_DECIMATOR_H

 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/typedefs.h"

 namespace webrtc {

-class VPMVideoDecimator
-{
-public:
-    VPMVideoDecimator();
-    ~VPMVideoDecimator();
-
-    void Reset();
-
-    void EnableTemporalDecimation(bool enable);
-
-    int32_t SetMaxFrameRate(uint32_t maxFrameRate);
-    int32_t SetTargetFrameRate(uint32_t frameRate);
+class VPMVideoDecimator {
+ public:
+  VPMVideoDecimator();
+  ~VPMVideoDecimator();

-    bool DropFrame();
-
-    void UpdateIncomingFrameRate();
+  void Reset();

-    // Get Decimated Frame Rate/Dimensions
-    uint32_t DecimatedFrameRate();
+  void EnableTemporalDecimation(bool enable);

-    //Get input frame rate
-    uint32_t InputFrameRate();
+  int32_t SetMaxFramerate(uint32_t max_frame_rate);
+  int32_t SetTargetframe_rate(uint32_t frame_rate);

-private:
-    void ProcessIncomingFrameRate(int64_t now);
+  bool DropFrame();

-    enum { kFrameCountHistorySize = 90};
-    enum { kFrameHistoryWindowMs = 2000};
+  void UpdateIncomingframe_rate();

-    // Temporal decimation
-    int32_t _overShootModifier;
-    uint32_t _dropCount;
-    uint32_t _keepCount;
-    uint32_t _targetFrameRate;
-    float _incomingFrameRate;
-    uint32_t _maxFrameRate;
-    int64_t _incomingFrameTimes[kFrameCountHistorySize];
-    bool _enableTemporalDecimation;
+  // Get Decimated Frame Rate/Dimensions.
+  uint32_t Decimatedframe_rate();
+  // Get input frame rate.
+  uint32_t Inputframe_rate();
+
+ private:
+  void ProcessIncomingframe_rate(int64_t now);
+
+  enum { kFrameCountHistory_size = 90};
+  enum { kFrameHistoryWindowMs = 2000};
+
+  // Temporal decimation.
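+  // overshoot_modifier_ carries the remainder of the rate overshoot between
+  // calls to DropFrame(), so dropped frames average out to the target rate.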
+ int32_t overshoot_modifier_; + uint32_t drop_count_; + uint32_t keep_count_; + uint32_t target_frame_rate_; + float incoming_frame_rate_; + uint32_t max_frame_rate_; + int64_t incoming_frame_times_[kFrameCountHistory_size]; + bool enable_temporal_decimation_; }; -} // namespace +} // namespace webrtc -#endif +#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_VIDEO_DECIMATOR_H diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi index f576b4d07f33..ae7fb953ce0e 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi @@ -17,14 +17,6 @@ '<(webrtc_root)/common_video/common_video.gyp:common_video', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - '../interface', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../interface', - ], - }, 'sources': [ '../interface/video_processing.h', '../interface/video_processing_defines.h', @@ -66,10 +58,6 @@ 'sources': [ 'content_analysis_sse2.cc', ], - 'include_dirs': [ - '../interface', - '../../../interface', - ], 'conditions': [ ['os_posix==1 and OS!="mac"', { 'cflags': [ '-msse2', ], diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.cc index 8152cd1738de..af1bfe1a412e 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.cc @@ -17,277 +17,204 @@ namespace webrtc { -namespace -{ - void - SetSubSampling(VideoProcessingModule::FrameStats* stats, - const int32_t width, - const int32_t height) - { - if (width * height >= 640 * 480) - { - stats->subSamplWidth = 3; - stats->subSamplHeight = 3; - } - else if (width * height >= 352 * 288) - { - stats->subSamplWidth = 2; - stats->subSamplHeight = 2; - } - else if (width * height >= 176 * 144) - { - stats->subSamplWidth = 1; - stats->subSamplHeight = 1; - } - else - { - stats->subSamplWidth = 0; - stats->subSamplHeight = 0; - } - } -} - -VideoProcessingModule* -VideoProcessingModule::Create(const int32_t id) -{ - - return new VideoProcessingModuleImpl(id); -} - -void -VideoProcessingModule::Destroy(VideoProcessingModule* module) -{ - if (module) - { - delete static_cast(module); - } -} - -int32_t -VideoProcessingModuleImpl::ChangeUniqueId(const int32_t id) -{ - CriticalSectionScoped mutex(&_mutex); - _id = id; - _brightnessDetection.ChangeUniqueId(id); - _deflickering.ChangeUniqueId(id); - _denoising.ChangeUniqueId(id); - _framePreProcessor.ChangeUniqueId(id); - return VPM_OK; -} - -int32_t -VideoProcessingModuleImpl::Id() const -{ - CriticalSectionScoped mutex(&_mutex); - return _id; -} - -VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id) : - _id(id), - _mutex(*CriticalSectionWrapper::CreateCriticalSection()) -{ - _brightnessDetection.ChangeUniqueId(id); - _deflickering.ChangeUniqueId(id); - _denoising.ChangeUniqueId(id); - _framePreProcessor.ChangeUniqueId(id); - WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, - "Created"); -} - - -VideoProcessingModuleImpl::~VideoProcessingModuleImpl() -{ - WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, - 
"Destroyed"); - - delete &_mutex; -} - -void -VideoProcessingModuleImpl::Reset() -{ - CriticalSectionScoped mutex(&_mutex); - _deflickering.Reset(); - _denoising.Reset(); - _brightnessDetection.Reset(); - _framePreProcessor.Reset(); - -} - -int32_t -VideoProcessingModule::GetFrameStats(FrameStats* stats, - const I420VideoFrame& frame) -{ - if (frame.IsZeroSize()) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, - "zero size frame"); - return VPM_PARAMETER_ERROR; - } - - int width = frame.width(); - int height = frame.height(); - - ClearFrameStats(stats); // The histogram needs to be zeroed out. - SetSubSampling(stats, width, height); - - const uint8_t* buffer = frame.buffer(kYPlane); - // Compute histogram and sum of frame - for (int i = 0; i < height; i += (1 << stats->subSamplHeight)) - { - int k = i * width; - for (int j = 0; j < width; j += (1 << stats->subSamplWidth)) - { - stats->hist[buffer[k + j]]++; - stats->sum += buffer[k + j]; - } - } - - stats->numPixels = (width * height) / ((1 << stats->subSamplWidth) * - (1 << stats->subSamplHeight)); - assert(stats->numPixels > 0); - - // Compute mean value of frame - stats->mean = stats->sum / stats->numPixels; - - return VPM_OK; -} - -bool -VideoProcessingModule::ValidFrameStats(const FrameStats& stats) -{ - if (stats.numPixels == 0) - { - return false; - } - - return true; -} - -void -VideoProcessingModule::ClearFrameStats(FrameStats* stats) -{ - stats->mean = 0; - stats->sum = 0; - stats->numPixels = 0; +namespace { +void SetSubSampling(VideoProcessingModule::FrameStats* stats, + const int32_t width, + const int32_t height) { + if (width * height >= 640 * 480) { + stats->subSamplWidth = 3; + stats->subSamplHeight = 3; + } else if (width * height >= 352 * 288) { + stats->subSamplWidth = 2; + stats->subSamplHeight = 2; + } else if (width * height >= 176 * 144) { + stats->subSamplWidth = 1; + stats->subSamplHeight = 1; + } else { stats->subSamplWidth = 0; stats->subSamplHeight = 0; - memset(stats->hist, 0, sizeof(stats->hist)); + } } - -int32_t -VideoProcessingModule::ColorEnhancement(I420VideoFrame* frame) -{ - return VideoProcessing::ColorEnhancement(frame); -} - -int32_t -VideoProcessingModule::Brighten(I420VideoFrame* frame, int delta) -{ - return VideoProcessing::Brighten(frame, delta); -} - -int32_t -VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame, - FrameStats* stats) -{ - CriticalSectionScoped mutex(&_mutex); - return _deflickering.ProcessFrame(frame, stats); -} - -int32_t -VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame) -{ - CriticalSectionScoped mutex(&_mutex); - return _denoising.ProcessFrame(frame); -} - -int32_t -VideoProcessingModuleImpl::BrightnessDetection(const I420VideoFrame& frame, - const FrameStats& stats) -{ - CriticalSectionScoped mutex(&_mutex); - return _brightnessDetection.ProcessFrame(frame, stats); -} - - -void -VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable) -{ - CriticalSectionScoped mutex(&_mutex); - _framePreProcessor.EnableTemporalDecimation(enable); -} - - -void -VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling - resamplingMode) -{ - CriticalSectionScoped cs(&_mutex); - _framePreProcessor.SetInputFrameResampleMode(resamplingMode); -} - -int32_t -VideoProcessingModuleImpl::SetMaxFrameRate(uint32_t maxFrameRate) -{ - CriticalSectionScoped cs(&_mutex); - return _framePreProcessor.SetMaxFrameRate(maxFrameRate); - -} - -int32_t -VideoProcessingModuleImpl::SetTargetResolution(uint32_t width, - uint32_t height, - 
uint32_t frameRate) -{ - CriticalSectionScoped cs(&_mutex); - return _framePreProcessor.SetTargetResolution(width, height, frameRate); -} - - -uint32_t -VideoProcessingModuleImpl::DecimatedFrameRate() -{ - CriticalSectionScoped cs(&_mutex); - return _framePreProcessor.DecimatedFrameRate(); -} - - -uint32_t -VideoProcessingModuleImpl::DecimatedWidth() const -{ - CriticalSectionScoped cs(&_mutex); - return _framePreProcessor.DecimatedWidth(); -} - -uint32_t -VideoProcessingModuleImpl::DecimatedHeight() const -{ - CriticalSectionScoped cs(&_mutex); - return _framePreProcessor.DecimatedHeight(); -} - -int32_t -VideoProcessingModuleImpl::PreprocessFrame(const I420VideoFrame& frame, - I420VideoFrame **processedFrame) -{ - CriticalSectionScoped mutex(&_mutex); - return _framePreProcessor.PreprocessFrame(frame, processedFrame); -} - -VideoContentMetrics* -VideoProcessingModuleImpl::ContentMetrics() const -{ - CriticalSectionScoped mutex(&_mutex); - return _framePreProcessor.ContentMetrics(); -} - - -void -VideoProcessingModuleImpl::EnableContentAnalysis(bool enable) -{ - CriticalSectionScoped mutex(&_mutex); - _framePreProcessor.EnableContentAnalysis(enable); -} - } // namespace + +VideoProcessingModule* VideoProcessingModule::Create(const int32_t id) { + return new VideoProcessingModuleImpl(id); +} + +void VideoProcessingModule::Destroy(VideoProcessingModule* module) { + if (module) + delete static_cast(module); +} + +int32_t VideoProcessingModuleImpl::ChangeUniqueId(const int32_t id) { + CriticalSectionScoped mutex(&mutex_); + id_ = id; + brightness_detection_.ChangeUniqueId(id); + deflickering_.ChangeUniqueId(id); + denoising_.ChangeUniqueId(id); + frame_pre_processor_.ChangeUniqueId(id); + return VPM_OK; +} + +int32_t VideoProcessingModuleImpl::Id() const { + CriticalSectionScoped mutex(&mutex_); + return id_; +} + +VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id) + : id_(id), + mutex_(*CriticalSectionWrapper::CreateCriticalSection()) { + brightness_detection_.ChangeUniqueId(id); + deflickering_.ChangeUniqueId(id); + denoising_.ChangeUniqueId(id); + frame_pre_processor_.ChangeUniqueId(id); + WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_, + "Created"); +} + +VideoProcessingModuleImpl::~VideoProcessingModuleImpl() { + WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_, + "Destroyed"); + delete &mutex_; +} + +void VideoProcessingModuleImpl::Reset() { + CriticalSectionScoped mutex(&mutex_); + deflickering_.Reset(); + denoising_.Reset(); + brightness_detection_.Reset(); + frame_pre_processor_.Reset(); +} + +int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats, + const I420VideoFrame& frame) { + if (frame.IsZeroSize()) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, + "zero size frame"); + return VPM_PARAMETER_ERROR; + } + + int width = frame.width(); + int height = frame.height(); + + ClearFrameStats(stats); // The histogram needs to be zeroed out. 
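The sampling pass that follows steps through the Y plane with a power-of-two stride chosen by SetSubSampling() above (shift 3 at VGA and larger, 2 at CIF, 1 at QCIF, 0 below that) and accumulates a histogram, sum, and mean. The same pass in isolation, assuming a non-empty frame; the plain struct below is ours, not FrameStats:

#include <cstdint>
#include <cstring>

struct LumaStats {
  uint32_t hist[256];
  uint32_t sum;
  uint32_t num_pixels;
  uint8_t mean;
};

// The source tracks separate width/height shifts, but always sets them equal.
void ComputeLumaStats(const uint8_t* y, int width, int height,
                      LumaStats* stats) {
  int shift = 0;
  const int pixels = width * height;
  if (pixels >= 640 * 480) shift = 3;
  else if (pixels >= 352 * 288) shift = 2;
  else if (pixels >= 176 * 144) shift = 1;

  std::memset(stats, 0, sizeof(*stats));  // Histogram must start zeroed.
  for (int i = 0; i < height; i += (1 << shift)) {
    const int row = i * width;
    for (int j = 0; j < width; j += (1 << shift)) {
      stats->hist[y[row + j]]++;
      stats->sum += y[row + j];
    }
  }
  // Same simple count the source uses (it asserts num_pixels > 0).
  stats->num_pixels = pixels / ((1 << shift) * (1 << shift));
  stats->mean = static_cast<uint8_t>(stats->sum / stats->num_pixels);
}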
+ SetSubSampling(stats, width, height); + + const uint8_t* buffer = frame.buffer(kYPlane); + // Compute histogram and sum of frame + for (int i = 0; i < height; i += (1 << stats->subSamplHeight)) { + int k = i * width; + for (int j = 0; j < width; j += (1 << stats->subSamplWidth)) { + stats->hist[buffer[k + j]]++; + stats->sum += buffer[k + j]; + } + } + + stats->num_pixels = (width * height) / ((1 << stats->subSamplWidth) * + (1 << stats->subSamplHeight)); + assert(stats->num_pixels > 0); + + // Compute mean value of frame + stats->mean = stats->sum / stats->num_pixels; + + return VPM_OK; +} + +bool VideoProcessingModule::ValidFrameStats(const FrameStats& stats) { + if (stats.num_pixels == 0) return false; + return true; +} + +void VideoProcessingModule::ClearFrameStats(FrameStats* stats) { + stats->mean = 0; + stats->sum = 0; + stats->num_pixels = 0; + stats->subSamplWidth = 0; + stats->subSamplHeight = 0; + memset(stats->hist, 0, sizeof(stats->hist)); +} + +int32_t VideoProcessingModule::ColorEnhancement(I420VideoFrame* frame) { + return VideoProcessing::ColorEnhancement(frame); +} + +int32_t VideoProcessingModule::Brighten(I420VideoFrame* frame, int delta) { + return VideoProcessing::Brighten(frame, delta); +} + +int32_t VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame, + FrameStats* stats) { + CriticalSectionScoped mutex(&mutex_); + return deflickering_.ProcessFrame(frame, stats); +} + +int32_t VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame) { + CriticalSectionScoped mutex(&mutex_); + return denoising_.ProcessFrame(frame); +} + +int32_t VideoProcessingModuleImpl::BrightnessDetection( + const I420VideoFrame& frame, + const FrameStats& stats) { + CriticalSectionScoped mutex(&mutex_); + return brightness_detection_.ProcessFrame(frame, stats); +} + + +void VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable) { + CriticalSectionScoped mutex(&mutex_); + frame_pre_processor_.EnableTemporalDecimation(enable); +} + + +void VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling + resampling_mode) { + CriticalSectionScoped cs(&mutex_); + frame_pre_processor_.SetInputFrameResampleMode(resampling_mode); +} + +int32_t VideoProcessingModuleImpl::SetMaxFramerate(uint32_t max_frame_rate) { + CriticalSectionScoped cs(&mutex_); + return frame_pre_processor_.SetMaxFramerate(max_frame_rate); +} + +int32_t VideoProcessingModuleImpl::SetTargetResolution(uint32_t width, + uint32_t height, + uint32_t frame_rate) { + CriticalSectionScoped cs(&mutex_); + return frame_pre_processor_.SetTargetResolution(width, height, frame_rate); +} + +uint32_t VideoProcessingModuleImpl::Decimatedframe_rate() { + CriticalSectionScoped cs(&mutex_); + return frame_pre_processor_.Decimatedframe_rate(); +} + +uint32_t VideoProcessingModuleImpl::DecimatedWidth() const { + CriticalSectionScoped cs(&mutex_); + return frame_pre_processor_.DecimatedWidth(); +} + +uint32_t VideoProcessingModuleImpl::DecimatedHeight() const { + CriticalSectionScoped cs(&mutex_); + return frame_pre_processor_.DecimatedHeight(); +} + +int32_t VideoProcessingModuleImpl::PreprocessFrame( + const I420VideoFrame& frame, + I420VideoFrame **processed_frame) { + CriticalSectionScoped mutex(&mutex_); + return frame_pre_processor_.PreprocessFrame(frame, processed_frame); +} + +VideoContentMetrics* VideoProcessingModuleImpl::ContentMetrics() const { + CriticalSectionScoped mutex(&mutex_); + return frame_pre_processor_.ContentMetrics(); +} + +void VideoProcessingModuleImpl::EnableContentAnalysis(bool enable) { + 
CriticalSectionScoped mutex(&mutex_); + frame_pre_processor_.EnableContentAnalysis(enable); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.h index 278aa7b146c7..913bb6483645 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing_impl.h @@ -22,67 +22,64 @@ namespace webrtc { class CriticalSectionWrapper; -class VideoProcessingModuleImpl : public VideoProcessingModule -{ -public: +class VideoProcessingModuleImpl : public VideoProcessingModule { + public: + VideoProcessingModuleImpl(int32_t id); - VideoProcessingModuleImpl(int32_t id); + virtual ~VideoProcessingModuleImpl(); - virtual ~VideoProcessingModuleImpl(); + int32_t Id() const; - int32_t Id() const; + virtual int32_t ChangeUniqueId(const int32_t id); - virtual int32_t ChangeUniqueId(const int32_t id); + virtual void Reset(); - virtual void Reset(); + virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats); - virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats); + virtual int32_t Denoising(I420VideoFrame* frame); - virtual int32_t Denoising(I420VideoFrame* frame); + virtual int32_t BrightnessDetection(const I420VideoFrame& frame, + const FrameStats& stats); - virtual int32_t BrightnessDetection(const I420VideoFrame& frame, - const FrameStats& stats); + // Frame pre-processor functions - //Frame pre-processor functions + // Enable temporal decimation + virtual void EnableTemporalDecimation(bool enable); - //Enable temporal decimation - virtual void EnableTemporalDecimation(bool enable); + virtual void SetInputFrameResampleMode(VideoFrameResampling resampling_mode); - virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode); + // Enable content analysis + virtual void EnableContentAnalysis(bool enable); - //Enable content analysis - virtual void EnableContentAnalysis(bool enable); + // Set max frame rate + virtual int32_t SetMaxFramerate(uint32_t max_frame_rate); - //Set max frame rate - virtual int32_t SetMaxFrameRate(uint32_t maxFrameRate); - - // Set Target Resolution: frame rate and dimension - virtual int32_t SetTargetResolution(uint32_t width, - uint32_t height, - uint32_t frameRate); + // Set Target Resolution: frame rate and dimension + virtual int32_t SetTargetResolution(uint32_t width, + uint32_t height, + uint32_t frame_rate); - // Get decimated values: frame rate/dimension - virtual uint32_t DecimatedFrameRate(); - virtual uint32_t DecimatedWidth() const; - virtual uint32_t DecimatedHeight() const; + // Get decimated values: frame rate/dimension + virtual uint32_t Decimatedframe_rate(); + virtual uint32_t DecimatedWidth() const; + virtual uint32_t DecimatedHeight() const; - // Preprocess: - // Pre-process incoming frame: Sample when needed and compute content - // metrics when enabled. - // If no resampling takes place - processedFrame is set to NULL. - virtual int32_t PreprocessFrame(const I420VideoFrame& frame, - I420VideoFrame** processedFrame); - virtual VideoContentMetrics* ContentMetrics() const; + // Preprocess: + // Pre-process incoming frame: Sample when needed and compute content + // metrics when enabled. + // If no resampling takes place - processed_frame is set to NULL. 
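That one-line contract, a NULL out-parameter meaning the input was already at the target size, is what the PreprocessorLogic test later exercises. A toy demonstration of the calling pattern; the stub types stand in for I420VideoFrame and the module:

#include <cstdio>

// Stub frame and preprocessor, shaped only to show the NULL contract.
struct Frame { int width; };

int PreprocessFrame(const Frame& in, Frame* scratch, int target_width,
                    Frame** out) {
  if (in.width == target_width) {
    *out = NULL;  // No resampling took place.
    return 0;
  }
  scratch->width = target_width;  // "Resampled" result.
  *out = scratch;
  return 0;
}

int main() {
  Frame input = {352};
  Frame scratch = {0};
  Frame* out = NULL;
  PreprocessFrame(input, &scratch, 352, &out);
  const Frame& use1 = out ? *out : input;  // Falls back to the input frame.
  PreprocessFrame(input, &scratch, 176, &out);
  const Frame& use2 = out ? *out : input;  // Uses the resampled frame.
  std::printf("%d %d\n", use1.width, use2.width);  // Prints: 352 176
}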
+ virtual int32_t PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processed_frame); + virtual VideoContentMetrics* ContentMetrics() const; -private: - int32_t _id; - CriticalSectionWrapper& _mutex; - - VPMDeflickering _deflickering; - VPMDenoising _denoising; - VPMBrightnessDetection _brightnessDetection; - VPMFramePreprocessor _framePreProcessor; + private: + int32_t id_; + CriticalSectionWrapper& mutex_; + VPMDeflickering deflickering_; + VPMDenoising denoising_; + VPMBrightnessDetection brightness_detection_; + VPMFramePreprocessor frame_pre_processor_; }; } // namespace diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc index d17ade9696fd..d7ac72908aae 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc @@ -19,24 +19,24 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) uint32_t frameNum = 0; int32_t brightnessWarning = 0; uint32_t warningCount = 0; - scoped_array video_buffer(new uint8_t[_frame_length]); - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == - _frame_length) + scoped_array video_buffer(new uint8_t[frame_length_]); + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_) { EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); frameNum++; VideoProcessingModule::FrameStats stats; - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_, stats), 0); if (brightnessWarning != VideoProcessingModule::kNoWarning) { warningCount++; } } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; // Expect few warnings float warningProportion = static_cast(warningCount) / frameNum * 100; @@ -44,21 +44,21 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) printf("Stock foreman: %.1f %%\n", warningProportion); EXPECT_LT(warningProportion, 10); - rewind(_sourceFile); + rewind(source_file_); frameNum = 0; warningCount = 0; - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == - _frame_length && + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_ && frameNum < 300) { EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); frameNum++; - uint8_t* frame = _videoFrame.buffer(kYPlane); + uint8_t* frame = video_frame_.buffer(kYPlane); uint32_t yTmp = 0; - for (int yIdx = 0; yIdx < _width * _height; yIdx++) + for (int yIdx = 0; yIdx < width_ * height_; yIdx++) { yTmp = frame[yIdx] << 1; if (yTmp > 255) @@ -69,8 +69,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) } VideoProcessingModule::FrameStats stats; - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + ASSERT_GE(brightnessWarning = 
vpm_->BrightnessDetection(video_frame_, stats), 0); EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning); if (brightnessWarning == VideoProcessingModule::kBrightWarning) @@ -78,35 +78,35 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) warningCount++; } } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; // Expect many brightness warnings warningProportion = static_cast(warningCount) / frameNum * 100; printf("Bright foreman: %.1f %%\n", warningProportion); EXPECT_GT(warningProportion, 95); - rewind(_sourceFile); + rewind(source_file_); frameNum = 0; warningCount = 0; - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == - _frame_length && frameNum < 300) + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_ && frameNum < 300) { EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); frameNum++; - uint8_t* y_plane = _videoFrame.buffer(kYPlane); + uint8_t* y_plane = video_frame_.buffer(kYPlane); int32_t yTmp = 0; - for (int yIdx = 0; yIdx < _width * _height; yIdx++) + for (int yIdx = 0; yIdx < width_ * height_; yIdx++) { yTmp = y_plane[yIdx] >> 1; y_plane[yIdx] = static_cast(yTmp); } VideoProcessingModule::FrameStats stats; - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_, stats), 0); EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning); if (brightnessWarning == VideoProcessingModule::kDarkWarning) @@ -114,7 +114,7 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) warningCount++; } } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; // Expect many darkness warnings warningProportion = static_cast(warningCount) / frameNum * 100; diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc index c048003fb919..fc560bef138a 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc @@ -23,14 +23,14 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement) { TickTime t0; TickTime t1; - TickInterval accTicks; + TickInterval acc_ticks; // Use a shorter version of the Foreman clip for this test. 
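The brightness checks above synthesize their own inputs from Foreman: the Y plane is doubled with clipping for the bright pass and halved for the dark pass. That transform in isolation (the helper name is ours):

#include <cstddef>
#include <cstdint>

// Luma scaling as used by the brightness-detection test: factor 2 with
// clipping produces the "bright foreman", factor 1/2 the dark one.
void ScaleLuma(uint8_t* y_plane, size_t num_pixels, bool brighten) {
  for (size_t i = 0; i < num_pixels; i++) {
    if (brighten) {
      const uint32_t v = static_cast<uint32_t>(y_plane[i]) << 1;
      y_plane[i] = v > 255 ? 255 : static_cast<uint8_t>(v);
    } else {
      y_plane[i] = y_plane[i] >> 1;
    }
  }
}

int main() {
  uint8_t y[4] = {10, 128, 200, 250};
  ScaleLuma(y, 4, true);   // -> {20, 255, 255, 255}
  ScaleLuma(y, 4, false);  // -> {10, 127, 127, 127}
  return y[0] == 10 ? 0 : 1;
}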
- fclose(_sourceFile); + fclose(source_file_); const std::string video_file = webrtc::test::ResourcePath("foreman_cif_short", "yuv"); - _sourceFile = fopen(video_file.c_str(), "rb"); - ASSERT_TRUE(_sourceFile != NULL) << + source_file_ = fopen(video_file.c_str(), "rb"); + ASSERT_TRUE(source_file_ != NULL) << "Cannot read source file: " + video_file + "\n"; std::string output_file = webrtc::test::OutputPath() + @@ -39,27 +39,27 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement) ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n"; uint32_t frameNum = 0; - scoped_array video_buffer(new uint8_t[_frame_length]); - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == - _frame_length) + scoped_array video_buffer(new uint8_t[frame_length_]); + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_) { // Using ConvertToI420 to add stride to the image. EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); frameNum++; t0 = TickTime::Now(); - ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame)); + ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&video_frame_)); t1 = TickTime::Now(); - accTicks += t1 - t0; - if (PrintI420VideoFrame(_videoFrame, modFile) < 0) { + acc_ticks += t1 - t0; + if (PrintI420VideoFrame(video_frame_, modFile) < 0) { return; } } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; printf("\nTime per frame: %d us \n", - static_cast(accTicks.Microseconds() / frameNum)); + static_cast(acc_ticks.Microseconds() / frameNum)); rewind(modFile); printf("Comparing files...\n\n"); @@ -82,62 +82,62 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement) ASSERT_EQ(refLen, testLen) << "File lengths differ."; I420VideoFrame refVideoFrame; - refVideoFrame.CreateEmptyFrame(_width, _height, - _width, _half_width, _half_width); + refVideoFrame.CreateEmptyFrame(width_, height_, + width_, half_width_, half_width_); // Compare frame-by-frame. - scoped_array ref_buffer(new uint8_t[_frame_length]); - while (fread(video_buffer.get(), 1, _frame_length, modFile) == - _frame_length) + scoped_array ref_buffer(new uint8_t[frame_length_]); + while (fread(video_buffer.get(), 1, frame_length_, modFile) == + frame_length_) { // Using ConvertToI420 to add stride to the image. 
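The loop continuing below memcmp()s the Y, U, and V planes separately, and the test then fills a frame with the probe value 128 to assert that enhancement rewrites every chroma pixel while leaving luma untouched. The shape of that probe check, reduced to stubs; FakeEnhance is a placeholder, not ColorEnhancement():

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Placeholder transform: perturbs chroma only, as the real check demands.
void FakeEnhance(uint8_t* y, uint8_t* u, uint8_t* v,
                 size_t n_y, size_t n_uv) {
  (void)y; (void)n_y;  // Luminance must stay untouched.
  for (size_t i = 0; i < n_uv; i++) { u[i] += 1; v[i] += 1; }
}

int main() {
  enum { kY = 16, kUV = 4 };
  uint8_t ref[kY + 2 * kUV];
  uint8_t frame[kY + 2 * kUV];
  std::memset(ref, 128, sizeof(ref));  // 128 is known to be remapped.
  std::memcpy(frame, ref, sizeof(ref));
  FakeEnhance(frame, frame + kY, frame + kY + kUV, kY, kUV);
  assert(std::memcmp(frame, ref, kY) == 0);             // Y unchanged.
  assert(std::memcmp(frame + kY, ref + kY, kUV) != 0);  // All U changed.
  assert(std::memcmp(frame + kY + kUV, ref + kY + kUV, kUV) != 0);  // V too.
  return 0;
}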
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); - ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length, + width_, height_, + 0, kRotateNone, &video_frame_)); + ASSERT_EQ(frame_length_, fread(ref_buffer.get(), 1, frame_length_, refFile)); EXPECT_EQ(0, ConvertToI420(kI420, ref_buffer.get(), 0, 0, - _width, _height, + width_, height_, 0, kRotateNone, &refVideoFrame)); - EXPECT_EQ(0, memcmp(_videoFrame.buffer(kYPlane), + EXPECT_EQ(0, memcmp(video_frame_.buffer(kYPlane), refVideoFrame.buffer(kYPlane), - _size_y)); - EXPECT_EQ(0, memcmp(_videoFrame.buffer(kUPlane), + size_y_)); + EXPECT_EQ(0, memcmp(video_frame_.buffer(kUPlane), refVideoFrame.buffer(kUPlane), - _size_uv)); - EXPECT_EQ(0, memcmp(_videoFrame.buffer(kVPlane), + size_uv_)); + EXPECT_EQ(0, memcmp(video_frame_.buffer(kVPlane), refVideoFrame.buffer(kVPlane), - _size_uv)); + size_uv_)); } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; // Verify that all color pixels are enhanced, and no luminance values are // altered. - scoped_array testFrame(new uint8_t[_frame_length]); + scoped_array testFrame(new uint8_t[frame_length_]); // Use value 128 as probe value, since we know that this will be changed // in the enhancement. - memset(testFrame.get(), 128, _frame_length); + memset(testFrame.get(), 128, frame_length_); I420VideoFrame testVideoFrame; - testVideoFrame.CreateEmptyFrame(_width, _height, - _width, _half_width, _half_width); + testVideoFrame.CreateEmptyFrame(width_, height_, + width_, half_width_, half_width_); EXPECT_EQ(0, ConvertToI420(kI420, testFrame.get(), 0, 0, - _width, _height, 0, kRotateNone, + width_, height_, 0, kRotateNone, &testVideoFrame)); ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame)); EXPECT_EQ(0, memcmp(testVideoFrame.buffer(kYPlane), testFrame.get(), - _size_y)) + size_y_)) << "Function is modifying the luminance."; EXPECT_NE(0, memcmp(testVideoFrame.buffer(kUPlane), - testFrame.get() + _size_y, _size_uv)) << + testFrame.get() + size_y_, size_uv_)) << "Function is not modifying all chrominance pixels"; EXPECT_NE(0, memcmp(testVideoFrame.buffer(kVPlane), - testFrame.get() + _size_y + _size_uv, _size_uv)) << + testFrame.get() + size_y_ + size_uv_, size_uv_)) << "Function is not modifying all chrominance pixels"; ASSERT_EQ(0, fclose(refFile)); diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc index 26080da784e4..36a1ad7625de 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc @@ -15,32 +15,30 @@ namespace webrtc { -TEST_F(VideoProcessingModuleTest, ContentAnalysis) -{ - VPMContentAnalysis _ca_c(false); - VPMContentAnalysis _ca_sse(true); - VideoContentMetrics *_cM_c, *_cM_SSE; +TEST_F(VideoProcessingModuleTest, ContentAnalysis) { + VPMContentAnalysis ca__c(false); + VPMContentAnalysis ca__sse(true); + VideoContentMetrics *_cM_c, *_cM_SSE; - _ca_c.Initialize(_width,_height); - _ca_sse.Initialize(_width,_height); + ca__c.Initialize(width_,height_); + ca__sse.Initialize(width_,height_); - scoped_array video_buffer(new uint8_t[_frame_length]); - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) - == _frame_length) - { 
- // Using ConvertToI420 to add stride to the image. - EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); - _cM_c = _ca_c.ComputeContentMetrics(_videoFrame); - _cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame); + scoped_array video_buffer(new uint8_t[frame_length_]); + while (fread(video_buffer.get(), 1, frame_length_, source_file_) + == frame_length_) { + // Using ConvertToI420 to add stride to the image. + EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, + width_, height_, + 0, kRotateNone, &video_frame_)); + _cM_c = ca__c.ComputeContentMetrics(video_frame_); + _cM_SSE = ca__sse.ComputeContentMetrics(video_frame_); - ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err); - ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v); - ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h); - ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude); - } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err); + ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v); + ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h); + ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude); + } + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc index 85f4fd6ebb42..0fa3f48b4f2b 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc @@ -23,17 +23,17 @@ TEST_F(VideoProcessingModuleTest, Deflickering) { enum { NumRuns = 30 }; uint32_t frameNum = 0; - const uint32_t frameRate = 15; + const uint32_t frame_rate = 15; - int64_t minRuntime = 0; - int64_t avgRuntime = 0; + int64_t min_runtime = 0; + int64_t avg_runtime = 0; // Close automatically opened Foreman. 
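Worth noting in the content-analysis test just above: the SSE2 and plain-C paths run over identical frames and their metrics must match exactly, field by field. The equivalence pattern reduced to a skeleton; the two sums below are placeholders for the C and SIMD metric implementations:

#include <cassert>
#include <cstdint>

// Reference implementation.
uint64_t SumC(const uint8_t* p, int n) {
  uint64_t s = 0;
  for (int i = 0; i < n; i++) s += p[i];
  return s;
}

// "Optimized" variant (here just unrolled); must agree bit-for-bit.
uint64_t SumOptimized(const uint8_t* p, int n) {
  uint64_t s = 0;
  int i = 0;
  for (; i + 4 <= n; i += 4) s += p[i] + p[i+1] + p[i+2] + p[i+3];
  for (; i < n; i++) s += p[i];
  return s;
}

int main() {
  static uint8_t frame[352 * 288];  // CIF-sized test pattern.
  for (int i = 0; i < 352 * 288; i++) frame[i] = static_cast<uint8_t>(i * 7);
  assert(SumC(frame, 352 * 288) == SumOptimized(frame, 352 * 288));
  return 0;
}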
- fclose(_sourceFile); + fclose(source_file_); const std::string input_file = webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv"); - _sourceFile = fopen(input_file.c_str(), "rb"); - ASSERT_TRUE(_sourceFile != NULL) << + source_file_ = fopen(input_file.c_str(), "rb"); + ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file: " << input_file << "\n"; const std::string output_file = @@ -43,57 +43,57 @@ TEST_F(VideoProcessingModuleTest, Deflickering) "Could not open output file: " << output_file << "\n"; printf("\nRun time [us / frame]:\n"); - scoped_array video_buffer(new uint8_t[_frame_length]); - for (uint32_t runIdx = 0; runIdx < NumRuns; runIdx++) + scoped_array video_buffer(new uint8_t[frame_length_]); + for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) { TickTime t0; TickTime t1; - TickInterval accTicks; + TickInterval acc_ticks; uint32_t timeStamp = 1; frameNum = 0; - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == - _frame_length) + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_) { frameNum++; EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); - _videoFrame.set_timestamp(timeStamp); + width_, height_, + 0, kRotateNone, &video_frame_)); + video_frame_.set_timestamp(timeStamp); t0 = TickTime::Now(); VideoProcessingModule::FrameStats stats; - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats)); + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats)); t1 = TickTime::Now(); - accTicks += (t1 - t0); + acc_ticks += (t1 - t0); - if (runIdx == 0) + if (run_idx == 0) { - if (PrintI420VideoFrame(_videoFrame, deflickerFile) < 0) { + if (PrintI420VideoFrame(video_frame_, deflickerFile) < 0) { return; } } - timeStamp += (90000 / frameRate); + timeStamp += (90000 / frame_rate); } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; - printf("%u\n", static_cast(accTicks.Microseconds() / frameNum)); - if (accTicks.Microseconds() < minRuntime || runIdx == 0) + printf("%u\n", static_cast(acc_ticks.Microseconds() / frameNum)); + if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) { - minRuntime = accTicks.Microseconds(); + min_runtime = acc_ticks.Microseconds(); } - avgRuntime += accTicks.Microseconds(); + avg_runtime += acc_ticks.Microseconds(); - rewind(_sourceFile); + rewind(source_file_); } ASSERT_EQ(0, fclose(deflickerFile)); // TODO(kjellander): Add verification of deflicker output file. 
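The timing scaffold here (and again in the denoising test) is always the same: per run, accumulate elapsed microseconds over all frames, track the minimum across runs, then report min and average per frame. Standalone, with std::chrono in place of webrtc's TickTime and a busy loop in place of the per-frame call under test:

#include <chrono>
#include <cstdint>
#include <cstdio>

int main() {
  const int kRuns = 5;
  const int kFrames = 100;
  int64_t min_runtime = 0;
  int64_t avg_runtime = 0;
  for (int run = 0; run < kRuns; run++) {
    const auto t0 = std::chrono::steady_clock::now();
    volatile uint32_t sink = 0;
    for (int f = 0; f < kFrames; f++)          // Stand-in for ProcessFrame().
      for (uint32_t i = 0; i < 10000; i++) sink = sink + i;
    const auto t1 = std::chrono::steady_clock::now();
    const int64_t us = std::chrono::duration_cast<std::chrono::microseconds>(
        t1 - t0).count();
    if (run == 0 || us < min_runtime) min_runtime = us;  // Track the minimum.
    avg_runtime += us;
  }
  std::printf("Average run time = %d us / frame\n",
              static_cast<int>(avg_runtime / kFrames / kRuns));
  std::printf("Min run time = %d us / frame\n",
              static_cast<int>(min_runtime / kFrames));
}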
printf("\nAverage run time = %d us / frame\n", - static_cast(avgRuntime / frameNum / NumRuns)); + static_cast(avg_runtime / frameNum / NumRuns)); printf("Min run time = %d us / frame\n\n", - static_cast(minRuntime / frameNum)); + static_cast(min_runtime / frameNum)); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc index 8a3439318b1c..3023a2d7af1c 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc @@ -25,8 +25,8 @@ TEST_F(VideoProcessingModuleTest, DISABLED_ON_ANDROID(Denoising)) enum { NumRuns = 10 }; uint32_t frameNum = 0; - int64_t minRuntime = 0; - int64_t avgRuntime = 0; + int64_t min_runtime = 0; + int64_t avg_runtime = 0; const std::string denoise_filename = webrtc::test::OutputPath() + "denoise_testfile.yuv"; @@ -41,50 +41,50 @@ TEST_F(VideoProcessingModuleTest, DISABLED_ON_ANDROID(Denoising)) "Could not open noisy file: " << noise_filename << "\n"; printf("\nRun time [us / frame]:\n"); - for (uint32_t runIdx = 0; runIdx < NumRuns; runIdx++) + for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) { TickTime t0; TickTime t1; - TickInterval accTicks; + TickInterval acc_ticks; int32_t modifiedPixels = 0; frameNum = 0; - scoped_array video_buffer(new uint8_t[_frame_length]); - while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == - _frame_length) + scoped_array video_buffer(new uint8_t[frame_length_]); + while (fread(video_buffer.get(), 1, frame_length_, source_file_) == + frame_length_) { EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); frameNum++; - uint8_t* sourceBuffer = _videoFrame.buffer(kYPlane); + uint8_t* sourceBuffer = video_frame_.buffer(kYPlane); // Add noise to a part in video stream // Random noise // TODO: investigate the effectiveness of this test. 
- for (int ir = 0; ir < _height; ir++) + for (int ir = 0; ir < height_; ir++) { - uint32_t ik = ir * _width; - for (int ic = 0; ic < _width; ic++) + uint32_t ik = ir * width_; + for (int ic = 0; ic < width_; ic++) { uint8_t r = rand() % 16; r -= 8; - if (ir < _height / 4) + if (ir < height_ / 4) r = 0; - if (ir >= 3 * _height / 4) + if (ir >= 3 * height_ / 4) r = 0; - if (ic < _width / 4) + if (ic < width_ / 4) r = 0; - if (ic >= 3 * _width / 4) + if (ic >= 3 * width_ / 4) r = 0; /*uint8_t pixelValue = 0; - if (ir >= _height / 2) + if (ir >= height_ / 2) { // Region 3 or 4 pixelValue = 170; } - if (ic >= _width / 2) + if (ic >= width_ / 2) { // Region 2 or 4 pixelValue += 85; } @@ -95,42 +95,42 @@ TEST_F(VideoProcessingModuleTest, DISABLED_ON_ANDROID(Denoising)) } } - if (runIdx == 0) + if (run_idx == 0) { - if (PrintI420VideoFrame(_videoFrame, noiseFile) < 0) { + if (PrintI420VideoFrame(video_frame_, noiseFile) < 0) { return; } } t0 = TickTime::Now(); - ASSERT_GE(modifiedPixels = _vpm->Denoising(&_videoFrame), 0); + ASSERT_GE(modifiedPixels = vpm_->Denoising(&video_frame_), 0); t1 = TickTime::Now(); - accTicks += (t1 - t0); + acc_ticks += (t1 - t0); - if (runIdx == 0) + if (run_idx == 0) { - if (PrintI420VideoFrame(_videoFrame, noiseFile) < 0) { + if (PrintI420VideoFrame(video_frame_, noiseFile) < 0) { return; } } } - ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; + ASSERT_NE(0, feof(source_file_)) << "Error reading source file"; - printf("%u\n", static_cast(accTicks.Microseconds() / frameNum)); - if (accTicks.Microseconds() < minRuntime || runIdx == 0) + printf("%u\n", static_cast(acc_ticks.Microseconds() / frameNum)); + if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) { - minRuntime = accTicks.Microseconds(); + min_runtime = acc_ticks.Microseconds(); } - avgRuntime += accTicks.Microseconds(); + avg_runtime += acc_ticks.Microseconds(); - rewind(_sourceFile); + rewind(source_file_); } ASSERT_EQ(0, fclose(denoiseFile)); ASSERT_EQ(0, fclose(noiseFile)); printf("\nAverage run time = %d us / frame\n", - static_cast(avgRuntime / frameNum / NumRuns)); + static_cast(avg_runtime / frameNum / NumRuns)); printf("Min run time = %d us / frame\n\n", - static_cast(minRuntime / frameNum)); + static_cast(min_runtime / frameNum)); } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc index 66452603c012..6e54923063e2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc @@ -18,297 +18,288 @@ namespace webrtc { -// The |sourceFrame| is scaled to |target_width|,|target_height|, using the +// The |sourceFrame| is scaled to |targetwidth_|,|targetheight_|, using the // filter mode set to |mode|. The |expected_psnr| is used to verify basic // quality when the resampled frame is scaled back up/down to the // original/source size. |expected_psnr| is set to be ~0.1/0.05dB lower than // actual PSNR verified under the same conditions. 
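TestSize(), declared next, avoids golden files: scale to the target size, scale back to the source size, and require the PSNR against the original to clear a per-case floor (the floors are set just under previously measured values, per the comment above). The round-trip idea, fully self-contained; nearest-neighbor stands in for VPM's scaler, so the floor here is deliberately loose:

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

// Nearest-neighbor resize, a crude stand-in for the module's resampler.
std::vector<uint8_t> Resize(const std::vector<uint8_t>& src, int sw, int sh,
                            int dw, int dh) {
  std::vector<uint8_t> dst(dw * dh);
  for (int y = 0; y < dh; y++)
    for (int x = 0; x < dw; x++)
      dst[y * dw + x] = src[(y * sh / dh) * sw + (x * sw / dw)];
  return dst;
}

double Psnr(const std::vector<uint8_t>& a, const std::vector<uint8_t>& b) {
  double mse = 0;
  for (size_t i = 0; i < a.size(); i++) {
    const double d = double(a[i]) - double(b[i]);
    mse += d * d;
  }
  mse /= a.size();
  return mse == 0 ? 99.0 : 10.0 * std::log10(255.0 * 255.0 / mse);
}

int main() {
  const int w = 352, h = 288;
  std::vector<uint8_t> src(w * h);
  for (int y = 0; y < h; y++)       // Smooth synthetic luma pattern.
    for (int x = 0; x < w; x++)
      src[y * w + x] = static_cast<uint8_t>(
          128 + 60 * std::sin(x * 0.05) * std::sin(y * 0.05));
  std::vector<uint8_t> down = Resize(src, w, h, w / 2, h / 2);
  std::vector<uint8_t> back = Resize(down, w / 2, h / 2, w, h);
  const double psnr = Psnr(src, back);
  std::printf("round-trip PSNR: %.2f dB\n", psnr);
  return psnr > 20.0 ? 0 : 1;  // Loose floor for nearest-neighbor.
}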
-void TestSize(const I420VideoFrame& sourceFrame, int target_width, - int target_height, int mode, double expected_psnr, +void TestSize(const I420VideoFrame& sourceFrame, int targetwidth_, + int targetheight_, int mode, double expected_psnr, VideoProcessingModule* vpm); bool CompareFrames(const webrtc::I420VideoFrame& frame1, const webrtc::I420VideoFrame& frame2); -VideoProcessingModuleTest::VideoProcessingModuleTest() : - _vpm(NULL), - _sourceFile(NULL), - _width(352), - _half_width((_width + 1) / 2), - _height(288), - _size_y(_width * _height), - _size_uv(_half_width * ((_height + 1) / 2)), - _frame_length(CalcBufferSize(kI420, _width, _height)) -{ -} +VideoProcessingModuleTest::VideoProcessingModuleTest() + : vpm_(NULL), + source_file_(NULL), + width_(352), + half_width_((width_ + 1) / 2), + height_(288), + size_y_(width_ * height_), + size_uv_(half_width_ * ((height_ + 1) / 2)), + frame_length_(CalcBufferSize(kI420, width_, height_)) {} -void VideoProcessingModuleTest::SetUp() -{ - _vpm = VideoProcessingModule::Create(0); - ASSERT_TRUE(_vpm != NULL); - - ASSERT_EQ(0, _videoFrame.CreateEmptyFrame(_width, _height, _width, - _half_width, _half_width)); +void VideoProcessingModuleTest::SetUp() { + vpm_ = VideoProcessingModule::Create(0); + ASSERT_TRUE(vpm_ != NULL); + ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_, + half_width_, half_width_)); + // Clear video frame so DrMemory/Valgrind will allow reads of the buffer. + memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane)); + memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane)); + memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane)); const std::string video_file = webrtc::test::ResourcePath("foreman_cif", "yuv"); - _sourceFile = fopen(video_file.c_str(),"rb"); - ASSERT_TRUE(_sourceFile != NULL) << + source_file_ = fopen(video_file.c_str(),"rb"); + ASSERT_TRUE(source_file_ != NULL) << "Cannot read source file: " + video_file + "\n"; } -void VideoProcessingModuleTest::TearDown() -{ - if (_sourceFile != NULL) { - ASSERT_EQ(0, fclose(_sourceFile)); +void VideoProcessingModuleTest::TearDown() { + if (source_file_ != NULL) { + ASSERT_EQ(0, fclose(source_file_)); } - _sourceFile = NULL; + source_file_ = NULL; - if (_vpm != NULL) { - VideoProcessingModule::Destroy(_vpm); + if (vpm_ != NULL) { + VideoProcessingModule::Destroy(vpm_); } - _vpm = NULL; + vpm_ = NULL; } -TEST_F(VideoProcessingModuleTest, HandleNullBuffer) -{ +TEST_F(VideoProcessingModuleTest, HandleNullBuffer) { // TODO(mikhal/stefan): Do we need this one? VideoProcessingModule::FrameStats stats; // Video frame with unallocated buffer. 
I420VideoFrame videoFrame; - videoFrame.set_width(_width); - videoFrame.set_height(_height); + videoFrame.set_width(width_); + videoFrame.set_height(height_); - EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame)); + EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame)); - EXPECT_EQ(-1, _vpm->ColorEnhancement(&videoFrame)); + EXPECT_EQ(-1, vpm_->ColorEnhancement(&videoFrame)); - EXPECT_EQ(-1, _vpm->Deflickering(&videoFrame, &stats)); + EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats)); - EXPECT_EQ(-1, _vpm->Denoising(&videoFrame)); + EXPECT_EQ(-1, vpm_->Denoising(&videoFrame)); - EXPECT_EQ(-3, _vpm->BrightnessDetection(videoFrame, stats)); + EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats)); } -TEST_F(VideoProcessingModuleTest, HandleBadStats) -{ +TEST_F(VideoProcessingModuleTest, HandleBadStats) { VideoProcessingModule::FrameStats stats; - scoped_array video_buffer(new uint8_t[_frame_length]); - ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, - _sourceFile)); + scoped_array video_buffer(new uint8_t[frame_length_]); + ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, + source_file_)); EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); - EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats)); + EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats)); - EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats)); + EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats)); } -TEST_F(VideoProcessingModuleTest, HandleBadSize) -{ +TEST_F(VideoProcessingModuleTest, HandleBadSize) { VideoProcessingModule::FrameStats stats; - _videoFrame.ResetSize(); - _videoFrame.set_width(_width); - _videoFrame.set_height(0); - EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame)); + video_frame_.ResetSize(); + video_frame_.set_width(width_); + video_frame_.set_height(0); + EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, video_frame_)); - EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame)); + EXPECT_EQ(-1, vpm_->ColorEnhancement(&video_frame_)); - EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats)); + EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats)); - EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame)); + EXPECT_EQ(-1, vpm_->Denoising(&video_frame_)); - EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats)); + EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats)); - EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0)); - EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0)); + EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetTargetResolution(0,0,0)); + EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetMaxFramerate(0)); - I420VideoFrame *outFrame = NULL; - EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame, - &outFrame)); + I420VideoFrame *out_frame = NULL; + EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->PreprocessFrame(video_frame_, + &out_frame)); } -TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) -{ - I420VideoFrame videoFrame2; +TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) { + I420VideoFrame video_frame2; VideoProcessingModule::FrameStats stats; // Only testing non-static functions here. 
- scoped_array video_buffer(new uint8_t[_frame_length]); - ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, - _sourceFile)); + scoped_array video_buffer(new uint8_t[frame_length_]); + ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, + source_file_)); EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - ASSERT_EQ(0, videoFrame2.CopyFrame(_videoFrame)); - ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats)); - _vpm->Reset(); + width_, height_, + 0, kRotateNone, &video_frame_)); + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_)); + ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats)); + vpm_->Reset(); // Retrieve frame stats again in case Deflickering() has zeroed them. - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2)); - ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats)); - EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2)); + ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats)); + EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); - ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, - _sourceFile)); + ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, + source_file_)); // Using ConvertToI420 to add stride to the image. EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); - videoFrame2.CopyFrame(_videoFrame); - EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); - ASSERT_GE(_vpm->Denoising(&_videoFrame), 0); - _vpm->Reset(); - ASSERT_GE(_vpm->Denoising(&videoFrame2), 0); - EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); + width_, height_, + 0, kRotateNone, &video_frame_)); + video_frame2.CopyFrame(video_frame_); + EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); + ASSERT_GE(vpm_->Denoising(&video_frame_), 0); + vpm_->Reset(); + ASSERT_GE(vpm_->Denoising(&video_frame2), 0); + EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); - ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, - _sourceFile)); + ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, + source_file_)); EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); - ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - videoFrame2.CopyFrame(_videoFrame); - ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats)); - _vpm->Reset(); - ASSERT_EQ(0, _vpm->BrightnessDetection(videoFrame2, stats)); - EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); + width_, height_, + 0, kRotateNone, &video_frame_)); + ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + video_frame2.CopyFrame(video_frame_); + ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats)); + vpm_->Reset(); + ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats)); + EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); } -TEST_F(VideoProcessingModuleTest, FrameStats) -{ +TEST_F(VideoProcessingModuleTest, FrameStats) { VideoProcessingModule::FrameStats stats; - scoped_array video_buffer(new uint8_t[_frame_length]); - ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, - _sourceFile)); + scoped_array video_buffer(new uint8_t[frame_length_]); + ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, + source_file_)); 
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); - EXPECT_FALSE(_vpm->ValidFrameStats(stats)); - EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - EXPECT_TRUE(_vpm->ValidFrameStats(stats)); + EXPECT_FALSE(vpm_->ValidFrameStats(stats)); + EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); + EXPECT_TRUE(vpm_->ValidFrameStats(stats)); printf("\nFrameStats\n"); - printf("mean: %u\nnumPixels: %u\nsubSamplWidth: " + printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: " "%u\nsumSamplHeight: %u\nsum: %u\n\n", static_cast(stats.mean), - static_cast(stats.numPixels), + static_cast(stats.num_pixels), static_cast(stats.subSamplHeight), static_cast(stats.subSamplWidth), static_cast(stats.sum)); - _vpm->ClearFrameStats(&stats); - EXPECT_FALSE(_vpm->ValidFrameStats(stats)); + vpm_->ClearFrameStats(&stats); + EXPECT_FALSE(vpm_->ValidFrameStats(stats)); } -TEST_F(VideoProcessingModuleTest, PreprocessorLogic) -{ +TEST_F(VideoProcessingModuleTest, PreprocessorLogic) { // Disable temporal sampling (frame dropping). - _vpm->EnableTemporalDecimation(false); + vpm_->EnableTemporalDecimation(false); int resolution = 100; - EXPECT_EQ(VPM_OK, _vpm->SetMaxFrameRate(30)); - EXPECT_EQ(VPM_OK, _vpm->SetTargetResolution(resolution, resolution, 15)); - EXPECT_EQ(VPM_OK, _vpm->SetTargetResolution(resolution, resolution, 30)); + EXPECT_EQ(VPM_OK, vpm_->SetMaxFramerate(30)); + EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15)); + EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); // Disable spatial sampling. - _vpm->SetInputFrameResampleMode(kNoRescaling); - EXPECT_EQ(VPM_OK, _vpm->SetTargetResolution(resolution, resolution, 30)); - I420VideoFrame* outFrame = NULL; + vpm_->SetInputFrameResampleMode(kNoRescaling); + EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); + I420VideoFrame* out_frame = NULL; // Set rescaling => output frame != NULL. - _vpm->SetInputFrameResampleMode(kFastRescaling); - EXPECT_EQ(VPM_OK, _vpm->SetTargetResolution(resolution, resolution, 30)); - EXPECT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame)); - EXPECT_FALSE(outFrame == NULL); - if (outFrame) { - EXPECT_EQ(resolution, outFrame->width()); - EXPECT_EQ(resolution, outFrame->height()); + vpm_->SetInputFrameResampleMode(kFastRescaling); + EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); + EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame)); + EXPECT_FALSE(out_frame == NULL); + if (out_frame) { + EXPECT_EQ(resolution, out_frame->width()); + EXPECT_EQ(resolution, out_frame->height()); } // No rescaling=> output frame = NULL. 
- _vpm->SetInputFrameResampleMode(kNoRescaling); - EXPECT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame)); - EXPECT_TRUE(outFrame == NULL); + vpm_->SetInputFrameResampleMode(kNoRescaling); + EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame)); + EXPECT_TRUE(out_frame == NULL); } -TEST_F(VideoProcessingModuleTest, Resampler) -{ +TEST_F(VideoProcessingModuleTest, Resampler) { enum { NumRuns = 1 }; - int64_t minRuntime = 0; - int64_t avgRuntime = 0; + int64_t min_runtime = 0; + int64_t avg_runtime = 0; TickTime t0; TickTime t1; - TickInterval accTicks; + TickInterval acc_ticks; - rewind(_sourceFile); - ASSERT_TRUE(_sourceFile != NULL) << + rewind(source_file_); + ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; // CA not needed here - _vpm->EnableContentAnalysis(false); + vpm_->EnableContentAnalysis(false); // no temporal decimation - _vpm->EnableTemporalDecimation(false); + vpm_->EnableTemporalDecimation(false); // Reading test frame - scoped_array video_buffer(new uint8_t[_frame_length]); - ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, - _sourceFile)); + scoped_array video_buffer(new uint8_t[frame_length_]); + ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, + source_file_)); // Using ConvertToI420 to add stride to the image. EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, - _width, _height, - 0, kRotateNone, &_videoFrame)); + width_, height_, + 0, kRotateNone, &video_frame_)); - for (uint32_t runIdx = 0; runIdx < NumRuns; runIdx++) - { - // initiate test timer + for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) { + // Initiate test timer. t0 = TickTime::Now(); // Init the sourceFrame with a timestamp. - _videoFrame.set_render_time_ms(t0.MillisecondTimestamp()); - _videoFrame.set_timestamp(t0.MillisecondTimestamp() * 90); + video_frame_.set_render_time_ms(t0.MillisecondTimestamp()); + video_frame_.set_timestamp(t0.MillisecondTimestamp() * 90); // Test scaling to different sizes: source is of |width|/|height| = 352/288. // Scaling mode in VPM is currently fixed to kScaleBox (mode = 3). - TestSize(_videoFrame, 100, 50, 3, 24.0, _vpm); - TestSize(_videoFrame, 352/4, 288/4, 3, 25.2, _vpm); - TestSize(_videoFrame, 352/2, 288/2, 3, 28.1, _vpm); - TestSize(_videoFrame, 352, 288, 3, -1, _vpm); // no resampling. - TestSize(_videoFrame, 2*352, 2*288, 3, 32.2, _vpm); - TestSize(_videoFrame, 400, 256, 3, 31.3, _vpm); - TestSize(_videoFrame, 480, 640, 3, 32.15, _vpm); - TestSize(_videoFrame, 960, 720, 3, 32.2, _vpm); - TestSize(_videoFrame, 1280, 720, 3, 32.15, _vpm); + TestSize(video_frame_, 100, 50, 3, 24.0, vpm_); + TestSize(video_frame_, 352/4, 288/4, 3, 25.2, vpm_); + TestSize(video_frame_, 352/2, 288/2, 3, 28.1, vpm_); + TestSize(video_frame_, 352, 288, 3, -1, vpm_); // no resampling. + TestSize(video_frame_, 2*352, 2*288, 3, 32.2, vpm_); + TestSize(video_frame_, 400, 256, 3, 31.3, vpm_); + TestSize(video_frame_, 480, 640, 3, 32.15, vpm_); + TestSize(video_frame_, 960, 720, 3, 32.2, vpm_); + TestSize(video_frame_, 1280, 720, 3, 32.15, vpm_); // Upsampling to odd size. - TestSize(_videoFrame, 501, 333, 3, 32.05, _vpm); + TestSize(video_frame_, 501, 333, 3, 32.05, vpm_); // Downsample to odd size. 
- TestSize(_videoFrame, 281, 175, 3, 29.3, _vpm); + TestSize(video_frame_, 281, 175, 3, 29.3, vpm_); // stop timer t1 = TickTime::Now(); - accTicks += (t1 - t0); + acc_ticks += (t1 - t0); - if (accTicks.Microseconds() < minRuntime || runIdx == 0) { - minRuntime = accTicks.Microseconds(); + if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) { + min_runtime = acc_ticks.Microseconds(); } - avgRuntime += accTicks.Microseconds(); + avg_runtime += acc_ticks.Microseconds(); } printf("\nAverage run time = %d us / frame\n", - //static_cast(avgRuntime / frameNum / NumRuns)); - static_cast(avgRuntime)); + //static_cast(avg_runtime / frameNum / NumRuns)); + static_cast(avg_runtime)); printf("Min run time = %d us / frame\n\n", - //static_cast(minRuntime / frameNum)); - static_cast(minRuntime)); + //static_cast(min_runtime / frameNum)); + static_cast(min_runtime)); } -void TestSize(const I420VideoFrame& source_frame, int target_width, - int target_height, int mode, double expected_psnr, +void TestSize(const I420VideoFrame& source_frame, int targetwidth_, + int targetheight_, int mode, double expected_psnr, VideoProcessingModule* vpm) { - int source_width = source_frame.width(); - int source_height = source_frame.height(); + int sourcewidth_ = source_frame.width(); + int sourceheight_ = source_frame.height(); I420VideoFrame* out_frame = NULL; - ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30)); + ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(targetwidth_, targetheight_, 30)); ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame)); if (out_frame) { @@ -321,19 +312,19 @@ void TestSize(const I420VideoFrame& source_frame, int target_width, // (2) scale the resampled frame (|out_frame|) back to the original size and // compute PSNR relative to |source_frame| (for automatic verification). // (3) write out the processed frame for viewing. - if (target_width != static_cast(source_width) || - target_height != static_cast(source_height)) { + if (targetwidth_ != static_cast(sourcewidth_) || + targetheight_ != static_cast(sourceheight_)) { // Write the processed frame to file for visual inspection. std::ostringstream filename; filename << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" << - "from_" << source_width << "x" << source_height << "_to_" << - target_width << "x" << target_height << "_30Hz_P420.yuv"; + "from_" << sourcewidth_ << "x" << sourceheight_ << "_to_" << + targetwidth_ << "x" << targetheight_ << "_30Hz_P420.yuv"; std::cout << "Watch " << filename.str() << " and verify that it is okay." << std::endl; FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) { fprintf(stderr, "Failed to write frame for scaling to width/height: " - " %d %d \n", target_width, target_height); + " %d %d \n", targetwidth_, targetheight_); return; } fclose(stand_alone_file); @@ -342,8 +333,8 @@ void TestSize(const I420VideoFrame& source_frame, int target_width, resampled_source_frame.CopyFrame(*out_frame); // Scale |resampled_source_frame| back to original/source size. - ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width, - source_height, + ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(sourcewidth_, + sourceheight_, 30)); ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(resampled_source_frame, &out_frame)); @@ -351,14 +342,14 @@ void TestSize(const I420VideoFrame& source_frame, int target_width, // Write the processed frame to file for visual inspection. 
std::ostringstream filename2; filename2 << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" << - "from_" << target_width << "x" << target_height << "_to_" << - source_width << "x" << source_height << "_30Hz_P420.yuv"; + "from_" << targetwidth_ << "x" << targetheight_ << "_to_" << + sourcewidth_ << "x" << sourceheight_ << "_30Hz_P420.yuv"; std::cout << "Watch " << filename2.str() << " and verify that it is okay." << std::endl; stand_alone_file = fopen(filename2.str().c_str(), "wb"); if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) { fprintf(stderr, "Failed to write frame for scaling to width/height " - "%d %d \n", source_width, source_height); + "%d %d \n", sourcewidth_, sourceheight_); return; } fclose(stand_alone_file); @@ -368,7 +359,7 @@ void TestSize(const I420VideoFrame& source_frame, int target_width, EXPECT_GT(psnr, expected_psnr); printf("PSNR: %f. PSNR is between source of size %d %d, and a modified " "source which is scaled down/up to: %d %d, and back to source size \n", - psnr, source_width, source_height, target_width, target_height); + psnr, sourcewidth_, sourceheight_, targetwidth_, targetheight_); } } diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h index db8841b5da30..6daf9c2fd9d0 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h @@ -18,31 +18,28 @@ namespace webrtc { -class VideoProcessingModuleTest : public ::testing::Test -{ -protected: - VideoProcessingModuleTest(); - virtual void SetUp(); - virtual void TearDown(); - static void SetUpTestCase() - { - Trace::CreateTrace(); - std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt"; - ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str())); - } - static void TearDownTestCase() - { - Trace::ReturnTrace(); - } - VideoProcessingModule* _vpm; - FILE* _sourceFile; - I420VideoFrame _videoFrame; - const int _width; - const int _half_width; - const int _height; - const int _size_y; - const int _size_uv; - const unsigned int _frame_length; +class VideoProcessingModuleTest : public ::testing::Test { + protected: + VideoProcessingModuleTest(); + virtual void SetUp(); + virtual void TearDown(); + static void SetUpTestCase() { + Trace::CreateTrace(); + std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt"; + ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str())); + } + static void TearDownTestCase() { + Trace::ReturnTrace(); + } + VideoProcessingModule* vpm_; + FILE* source_file_; + I420VideoFrame video_frame_; + const int width_; + const int half_width_; + const int height_; + const int size_y_; + const int size_uv_; + const unsigned int frame_length_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java b/media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java index e2c9921deefb..9e832ae2d6e3 100644 --- a/media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java +++ b/media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java @@ -11,13 +11,21 @@ package org.webrtc.videoengine; import android.content.Context; +import android.util.Log; +import 
android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; +import android.view.View; + +import org.mozilla.gecko.GeckoApp; +import org.mozilla.gecko.GeckoAppShell; +import org.mozilla.gecko.util.ThreadUtils; public class ViERenderer { + private final static String TAG = "WEBRTC-ViEREnderer"; // View used for local rendering that Cameras can use for Video Overlay. - private static SurfaceHolder g_localRenderer; + private static SurfaceHolder g_localRenderer = null; public static SurfaceView CreateRenderer(Context context) { return CreateRenderer(context, false); @@ -44,11 +52,43 @@ public class ViERenderer { // ViECapture::AllocateCaptureDevice // LinearLayout.addview // ViECapture::StartCapture - public static SurfaceView CreateLocalRenderer(Context context) { - SurfaceView localRender = new SurfaceView(context); - g_localRenderer = localRender.getHolder(); - g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); - return localRender; + public static void CreateLocalRenderer() { + View cameraView = GeckoAppShell.getGeckoInterface().getCameraView(); + if (cameraView != null && (cameraView instanceof SurfaceView)) { + SurfaceView localRender = (SurfaceView)cameraView; + g_localRenderer = localRender.getHolder(); + } + + ThreadUtils.getUiHandler().post(new Runnable() { + @Override + public void run() { + try { + GeckoAppShell.getGeckoInterface().enableCameraView(); + } catch (Exception e) { + Log.e(TAG, "CreateLocalRenderer enableCameraView exception: " + + e.getLocalizedMessage()); + } + } + }); + } + + public static void DestroyLocalRenderer() { + if (g_localRenderer != null) { + g_localRenderer = null; + + ThreadUtils.getUiHandler().post(new Runnable() { + @Override + public void run() { + try { + GeckoAppShell.getGeckoInterface().disableCameraView(); + } catch (Exception e) { + Log.e(TAG, + "DestroyLocalRenderer disableCameraView exception: " + + e.getLocalizedMessage()); + } + } + }); + } } public static SurfaceHolder GetLocalRenderer() { diff --git a/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm b/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm index 41c7049ae17d..5c3d6fa781d7 100644 --- a/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm +++ b/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "engine_configurations.h" +#include "webrtc/engine_configurations.h" #if defined(COCOA_RENDERING) #import "cocoa_render_view.h" diff --git a/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_nsopengl.mm index 0659496261ac..2faca657bbed 100644 --- a/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_nsopengl.mm +++ b/media/webrtc/trunk/webrtc/modules/video_render/mac/video_render_nsopengl.mm @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "engine_configurations.h" +#include "webrtc/engine_configurations.h" #if defined(COCOA_RENDERING) #include "video_render_nsopengl.h" @@ -16,7 +16,7 @@ #include "event_wrapper.h" #include "trace.h" #include "thread_wrapper.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" namespace webrtc { diff --git a/media/webrtc/trunk/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm b/media/webrtc/trunk/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm index 0127a593bd35..8ebb9ce60924 100644 --- a/media/webrtc/trunk/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm +++ b/media/webrtc/trunk/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm @@ -18,14 +18,14 @@ #import #include -#include "common_types.h" -#import "webrtc/modules/video_render//mac/cocoa_render_view.h" -#include "module_common_types.h" -#include "process_thread.h" -#include "tick_util.h" -#include "trace.h" -#include "video_render_defines.h" -#include "video_render.h" +#import "webrtc/modules/video_render/mac/cocoa_render_view.h" +#include "webrtc/common_types.h" +#include "webrtc/modules/interface/module_common_types.h" +#include "webrtc/modules/utility/interface/process_thread.h" +#include "webrtc/modules/video_render/include/video_render.h" +#include "webrtc/modules/video_render/include/video_render_defines.h" +#include "webrtc/system_wrappers/interface/tick_util.h" +#include "webrtc/system_wrappers/interface/trace.h" using namespace webrtc; diff --git a/media/webrtc/trunk/webrtc/modules/video_render/video_render.gypi b/media/webrtc/trunk/webrtc/modules/video_render/video_render.gypi index 58bdcebf5d09..4f3844406e08 100644 --- a/media/webrtc/trunk/webrtc/modules/video_render/video_render.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_render/video_render.gypi @@ -16,16 +16,6 @@ '<(webrtc_root)/common_video/common_video.gyp:common_video', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - 'include', - '../interface', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - 'include', - '../interface', - ], - }, 'sources': [ 'android/video_render_android_impl.cc', 'android/video_render_android_impl.h', @@ -150,11 +140,6 @@ }, }, }], - ['OS=="mac"', { - 'direct_dependent_settings': { - 'include_dirs': ['mac',], - }, - }], ['OS=="win" and include_internal_video_render==1', { 'variables': { # 'directx_sdk_path' will be overridden in the condition block @@ -243,10 +228,10 @@ 'target_name': 'video_render_tests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'video_render_tests', ], 'includes': [ + '../../build/isolate.gypi', 'video_render_tests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.cc b/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.cc index be5cac9aaa6f..7025d62a77a4 100644 --- a/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.cc +++ b/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "webrtc/modules/video_render//video_render_frames.h" +#include "webrtc/modules/video_render/video_render_frames.h" #include @@ -19,13 +19,12 @@ namespace webrtc { -const int32_t KEventMaxWaitTimeMs = 200; +const uint32_t KEventMaxWaitTimeMs = 200; const uint32_t kMinRenderDelayMs = 10; const uint32_t kMaxRenderDelayMs= 500; VideoRenderFrames::VideoRenderFrames() - : incoming_frames_(), - render_delay_ms_(10) { + : render_delay_ms_(10) { } VideoRenderFrames::~VideoRenderFrames() { @@ -35,12 +34,19 @@ VideoRenderFrames::~VideoRenderFrames() { int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) { const int64_t time_now = TickTime::MillisecondTimestamp(); - if (new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, + // Drop old frames only when there are other frames in the queue, otherwise, a + // really slow system never renders any frames. + if (!incoming_frames_.empty() && + new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + -1, "%s: too old frame, timestamp=%u.", - __FUNCTION__, new_frame->timestamp()); + __FUNCTION__, + new_frame->timestamp()); return -1; } + if (new_frame->render_time_ms() > time_now + KFutureRenderTimestampMS) { WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, "%s: frame too long into the future, timestamp=%u.", @@ -49,26 +55,23 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) { } if (new_frame->native_handle() != NULL) { - incoming_frames_.PushBack(new TextureVideoFrame( + incoming_frames_.push_back(new TextureVideoFrame( static_cast(new_frame->native_handle()), new_frame->width(), new_frame->height(), new_frame->timestamp(), new_frame->render_time_ms())); - return incoming_frames_.GetSize(); + return static_cast(incoming_frames_.size()); } // Get an empty frame I420VideoFrame* frame_to_add = NULL; - if (!empty_frames_.Empty()) { - ListItem* item = empty_frames_.First(); - if (item) { - frame_to_add = static_cast(item->GetItem()); - empty_frames_.Erase(item); - } + if (!empty_frames_.empty()) { + frame_to_add = empty_frames_.front(); + empty_frames_.pop_front(); } if (!frame_to_add) { - if (empty_frames_.GetSize() + incoming_frames_.GetSize() > + if (empty_frames_.size() + incoming_frames_.size() > KMaxNumberOfFrames) { // Already allocated too many frames. WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, @@ -80,7 +83,7 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) { // Allocate new memory. WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1, "%s: allocating buffer %d", __FUNCTION__, - empty_frames_.GetSize() + incoming_frames_.GetSize()); + empty_frames_.size() + incoming_frames_.size()); frame_to_add = new I420VideoFrame(); if (!frame_to_add) { @@ -97,33 +100,28 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) { // TODO(mflodman) Change this! // Remove const ness. Copying will be costly. 
   frame_to_add->SwapFrame(new_frame);
-  incoming_frames_.PushBack(frame_to_add);
+  incoming_frames_.push_back(frame_to_add);
 
-  return incoming_frames_.GetSize();
+  return static_cast<int32_t>(incoming_frames_.size());
 }
 
 I420VideoFrame* VideoRenderFrames::FrameToRender() {
   I420VideoFrame* render_frame = NULL;
-  while (!incoming_frames_.Empty()) {
-    ListItem* item = incoming_frames_.First();
-    if (item) {
-      I420VideoFrame* oldest_frame_in_list =
-          static_cast<I420VideoFrame*>(item->GetItem());
-      if (oldest_frame_in_list->render_time_ms() <=
-          TickTime::MillisecondTimestamp() + render_delay_ms_) {
-        // This is the oldest one so far and it's OK to render.
-        if (render_frame) {
-          // This one is older than the newly found frame, remove this one.
-          ReturnFrame(render_frame);
-        }
-        render_frame = oldest_frame_in_list;
-        incoming_frames_.Erase(item);
-      } else {
-        // We can't release this one yet, we're done here.
-        break;
+  FrameList::iterator iter = incoming_frames_.begin();
+  while(iter != incoming_frames_.end()) {
+    I420VideoFrame* oldest_frame_in_list = *iter;
+    if (oldest_frame_in_list->render_time_ms() <=
+        TickTime::MillisecondTimestamp() + render_delay_ms_) {
+      // This is the oldest one so far and it's OK to render.
+      if (render_frame) {
+        // This one is older than the newly found frame, remove this one.
+        ReturnFrame(render_frame);
       }
+      render_frame = oldest_frame_in_list;
+      iter = incoming_frames_.erase(iter);
     } else {
-      assert(false);
+      // We can't release this one yet, we're done here.
+      break;
     }
   }
   return render_frame;
@@ -135,7 +133,7 @@ int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
     old_frame->ResetSize();
     old_frame->set_timestamp(0);
     old_frame->set_render_time_ms(0);
-    empty_frames_.PushBack(old_frame);
+    empty_frames_.push_back(old_frame);
   } else {
     delete old_frame;
   }
@@ -143,40 +141,29 @@ int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
 }
 
 int32_t VideoRenderFrames::ReleaseAllFrames() {
-  while (!incoming_frames_.Empty()) {
-    ListItem* item = incoming_frames_.First();
-    if (item) {
-      I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem());
-      assert(frame != NULL);
-      delete frame;
-    }
-    incoming_frames_.Erase(item);
+  for (FrameList::iterator iter = incoming_frames_.begin();
+       iter != incoming_frames_.end(); ++iter) {
+    delete *iter;
   }
-  while (!empty_frames_.Empty()) {
-    ListItem* item = empty_frames_.First();
-    if (item) {
-      I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem());
-      assert(frame != NULL);
-      delete frame;
-    }
-    empty_frames_.Erase(item);
+  incoming_frames_.clear();
+
+  for (FrameList::iterator iter = empty_frames_.begin();
+       iter != empty_frames_.end(); ++iter) {
+    delete *iter;
   }
+  empty_frames_.clear();
 
   return 0;
 }
 
 uint32_t VideoRenderFrames::TimeToNextFrameRelease() {
-  int64_t time_to_release = 0;
-  ListItem* item = incoming_frames_.First();
-  if (item) {
-    I420VideoFrame* oldest_frame =
-        static_cast<I420VideoFrame*>(item->GetItem());
-    time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
-                      - TickTime::MillisecondTimestamp();
-    if (time_to_release < 0) {
-      time_to_release = 0;
-    }
-  } else {
-    time_to_release = KEventMaxWaitTimeMs;
+  if (incoming_frames_.empty()) {
+    return KEventMaxWaitTimeMs;
+  }
+  I420VideoFrame* oldest_frame = incoming_frames_.front();
+  int64_t time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
+      - TickTime::MillisecondTimestamp();
+  if (time_to_release < 0) {
+    time_to_release = 0;
   }
   return static_cast<uint32_t>(time_to_release);
 }
 
diff --git a/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.h b/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.h
index 1a5d64d8072e..d2e887f8272e 100644
--- a/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.h
+++ b/media/webrtc/trunk/webrtc/modules/video_render/video_render_frames.h
@@ -11,8 +11,9 @@
 #ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT
 #define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT
 
+#include <list>
+
 #include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 
 namespace webrtc {
 
@@ -41,6 +42,8 @@ class VideoRenderFrames {
   int32_t SetRenderDelay(const uint32_t render_delay);
 
  private:
+  typedef std::list<I420VideoFrame*> FrameList;
+
   // 10 seconds for 30 fps.
   enum { KMaxNumberOfFrames = 300 };
   // Don't render frames with timestamp older than 500ms from now.
@@ -49,9 +52,9 @@ class VideoRenderFrames {
   enum { KFutureRenderTimestampMS = 10000 };
 
   // Sorted list with framed to be rendered, oldest first.
-  ListWrapper incoming_frames_;
+  FrameList incoming_frames_;
   // Empty frames.
-  ListWrapper empty_frames_;
+  FrameList empty_frames_;
 
   // Estimated delay from a frame is released until it's rendered.
   uint32_t render_delay_ms_;
diff --git a/media/webrtc/trunk/webrtc/modules/video_render/video_render_tests.isolate b/media/webrtc/trunk/webrtc/modules/video_render/video_render_tests.isolate
index bd596be56cd3..397ec04e8c7d 100644
--- a/media/webrtc/trunk/webrtc/modules/video_render/video_render_tests.isolate
+++ b/media/webrtc/trunk/webrtc/modules/video_render/video_render_tests.isolate
@@ -21,16 +21,15 @@
     'variables': {
       'command': [
         '../../../testing/test_env.py',
-        '../../../tools/swarm_client/googletest/run_test_cases.py',
         '<(PRODUCT_DIR)/video_render_tests<(EXECUTABLE_SUFFIX)',
       ],
       'isolate_dependency_tracked': [
         '../../../testing/test_env.py',
-        '../../../tools/swarm_client/run_isolated.py',
-        '../../../tools/swarm_client/googletest/run_test_cases.py',
-        '../../../tools/swarm_client/third_party/upload.py',
         '<(PRODUCT_DIR)/video_render_tests<(EXECUTABLE_SUFFIX)',
       ],
+      'isolate_dependency_untracked': [
+        '../../../tools/swarming_client/',
+      ],
     },
   }],
 ],
diff --git a/media/webrtc/trunk/webrtc/supplement.gypi b/media/webrtc/trunk/webrtc/supplement.gypi
new file mode 100644
index 000000000000..7898d4cbdf45
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/supplement.gypi
@@ -0,0 +1,5 @@
+{
+  'variables': {
+    'build_with_chromium': 0,
+  }
+}
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h
index ad44072de397..ff2ebc4d8898 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h
@@ -56,4 +56,4 @@ strheq \reg1, \reg2, \num
 
 .text
 
-#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert.h
index 4feda86c3a37..cdeaa5676bb5 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert.h
@@ -8,14 +8,76 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
+// Borrowed from Chromium's src/base/basictypes.h.
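The compile_assert.h rewrite beginning here swaps the old switch-statement trick for Chromium's template version, whose two-argument form names the offending typedef in the compiler error. A minimal usage sketch; MyStruct is a hypothetical type, not part of this patch:

#include "webrtc/system_wrappers/interface/compile_assert.h"

struct MyStruct {
  char payload[64];
};

// Passes: the expression is a compile-time constant and true.
COMPILE_ASSERT(sizeof(MyStruct) < 128, my_struct_too_large);

// Growing payload to char[200] would fail to compile with a diagnostic
// that mentions 'my_struct_too_large' (an array of negative size).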
+
 #ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
 #define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
 
-/* Use this macro to verify at compile time that certain restrictions are met.
- * The argument is the boolean expression to evaluate.
- * Example:
- *   COMPILE_ASSERT(sizeof(foo) < 128);
-*/
-#define COMPILE_ASSERT(expression) switch(0){case 0: case expression:;}
+// The COMPILE_ASSERT macro can be used to verify that a compile time
+// expression is true. For example, you could use it to verify the
+// size of a static array:
+//
+//   COMPILE_ASSERT(ARRAYSIZE_UNSAFE(content_type_names) == CONTENT_NUM_TYPES,
+//                  content_type_names_incorrect_size);
+//
+// or to make sure a struct is smaller than a certain size:
+//
+//   COMPILE_ASSERT(sizeof(foo) < 128, foo_too_large);
+//
+// The second argument to the macro is the name of the variable. If
+// the expression is false, most compilers will issue a warning/error
+// containing the name of the variable.
+
+// TODO(ajm): Hack to avoid multiple definitions until the base/ of webrtc and
+// libjingle are merged.
+#if !defined(COMPILE_ASSERT)
+template <bool>
+struct CompileAssert {
+};
+
+#define COMPILE_ASSERT(expr, msg) \
+  typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
+#endif  // COMPILE_ASSERT
+
+// Implementation details of COMPILE_ASSERT:
+//
+// - COMPILE_ASSERT works by defining an array type that has -1
+//   elements (and thus is invalid) when the expression is false.
+//
+// - The simpler definition
+//
+//     #define COMPILE_ASSERT(expr, msg) typedef char msg[(expr) ? 1 : -1]
+//
+//   does not work, as gcc supports variable-length arrays whose sizes
+//   are determined at run-time (this is gcc's extension and not part
+//   of the C++ standard). As a result, gcc fails to reject the
+//   following code with the simple definition:
+//
+//     int foo;
+//     COMPILE_ASSERT(foo, msg);  // not supposed to compile as foo is
+//                                // not a compile-time constant.
+//
+// - By using the type CompileAssert<(bool(expr))>, we ensures that
+//   expr is a compile-time constant. (Template arguments must be
+//   determined at compile-time.)
+//
+// - The outer parentheses in CompileAssert<(bool(expr))> are necessary
+//   to work around a bug in gcc 3.4.4 and 4.0.1. If we had written
+//
+//     CompileAssert<bool(expr)>
+//
+//   instead, these compilers will refuse to compile
+//
+//     COMPILE_ASSERT(5 > 0, some_message);
+//
+//   (They seem to think the ">" in "5 > 0" marks the end of the
+//   template argument list.)
+//
+// - The array size is (bool(expr) ? 1 : -1), instead of simply
+//
+//     ((expr) ? 1 : -1).
+//
+//   This is to avoid running into a bug in MS VC 7.1, which
+//   causes ((0.0) ? 1 : -1) to incorrectly evaluate to 1.
 
 #endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert_c.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert_c.h
new file mode 100644
index 000000000000..d9ba86600cca
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/compile_assert_c.h
@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
+
+// Only use this for C files. For C++, use compile_assert.h.
+//
+// Use this macro to verify at compile time that certain restrictions are met.
+// The argument is the boolean expression to evaluate.
+// Example:
+//   COMPILE_ASSERT(sizeof(foo) < 128);
+#define COMPILE_ASSERT(expression) switch (0) {case 0: case expression:;}
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/cpu_features_wrapper.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/cpu_features_wrapper.h
index 85152ba1f5f7..5697c4916414 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/cpu_features_wrapper.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/cpu_features_wrapper.h
@@ -15,7 +15,7 @@
 extern "C" {
 #endif
 
-#include <typedefs.h>
+#include "webrtc/typedefs.h"
 
 // List of features in x86.
 typedef enum {
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/critical_section_wrapper.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/critical_section_wrapper.h
index 0253a282d187..4979b5c7dd56 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/critical_section_wrapper.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/critical_section_wrapper.h
@@ -15,9 +15,10 @@
 // read/write locks instead.
 
 #include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
 
 namespace webrtc {
 
-class CriticalSectionWrapper {
+class LOCKABLE CriticalSectionWrapper {
  public:
   // Factory method, constructor disabled
   static CriticalSectionWrapper* CreateCriticalSection();
@@ -26,33 +27,25 @@ class CriticalSectionWrapper {
 
   // Tries to grab lock, beginning of a critical section. Will wait for the
   // lock to become available if the grab failed.
-  virtual void Enter() = 0;
+  virtual void Enter() EXCLUSIVE_LOCK_FUNCTION() = 0;
 
   // Returns a grabbed lock, end of critical section.
-  virtual void Leave() = 0;
+  virtual void Leave() UNLOCK_FUNCTION() = 0;
 };
 
 // RAII extension of the critical section. Prevents Enter/Leave mismatches and
 // provides more compact critical section syntax.
-class CriticalSectionScoped {
+class SCOPED_LOCKABLE CriticalSectionScoped {
  public:
   explicit CriticalSectionScoped(CriticalSectionWrapper* critsec)
-      : ptr_crit_sec_(critsec) {
+      EXCLUSIVE_LOCK_FUNCTION(critsec)
+      : ptr_crit_sec_(critsec) {
     ptr_crit_sec_->Enter();
   }
 
-  ~CriticalSectionScoped() {
-    if (ptr_crit_sec_) {
-      Leave();
-    }
-  }
+  ~CriticalSectionScoped() UNLOCK_FUNCTION() { ptr_crit_sec_->Leave(); }
 
  private:
-  void Leave() {
-    ptr_crit_sec_->Leave();
-    ptr_crit_sec_ = 0;
-  }
-
   CriticalSectionWrapper* ptr_crit_sec_;
 };
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/file_wrapper.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/file_wrapper.h
index 5de6774ad24c..68dc00501b30 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/file_wrapper.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/file_wrapper.h
@@ -12,6 +12,7 @@
 #define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
 
 #include <stddef.h>
+#include <stdio.h>
 
 #include "webrtc/common_types.h"
 #include "webrtc/typedefs.h"
@@ -37,6 +38,14 @@ class FileWrapper : public InStream, public OutStream {
                         bool loop = false,
                         bool text = false) = 0;
 
+  // Initializes the wrapper from an existing handle. |read_only| must match in
+  // the mode the file was opened in.
If |manage_file| is true, the wrapper + // takes ownership of |handle| and closes it in CloseFile(). + virtual int OpenFromFileHandle(FILE* handle, + bool manage_file, + bool read_only, + bool loop = false) = 0; + virtual int CloseFile() = 0; // Limits the file size to |bytes|. Writing will fail after the cap diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/list_wrapper.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/list_wrapper.h deleted file mode 100644 index fe6607195a5a..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/list_wrapper.h +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_ -#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_ - -#include "webrtc/system_wrappers/interface/constructor_magic.h" - -namespace webrtc { - -class CriticalSectionWrapper; - -class ListItem { - friend class ListWrapper; - - public: - ListItem(const void* ptr); - ListItem(const unsigned int item); - virtual ~ListItem(); - void* GetItem() const; - unsigned int GetUnsignedItem() const; - - protected: - ListItem* next_; - ListItem* prev_; - - private: - const void* item_ptr_; - const unsigned int item_; -}; - -class ListWrapper { - public: - ListWrapper(); - virtual ~ListWrapper(); - - // Returns the number of elements stored in the list. - unsigned int GetSize() const; - - // Puts a pointer to anything last in the list. - int PushBack(const void* ptr); - // Puts a pointer to anything first in the list. - int PushFront(const void* ptr); - - // Puts a copy of the specified integer last in the list. - int PushBack(const unsigned int item_id); - // Puts a copy of the specified integer first in the list. - int PushFront(const unsigned int item_id); - - // Pops the first ListItem from the list - int PopFront(); - - // Pops the last ListItem from the list - int PopBack(); - - // Returns true if the list is empty - bool Empty() const; - - // Returns a pointer to the first ListItem in the list. - ListItem* First() const; - - // Returns a pointer to the last ListItem in the list. - ListItem* Last() const; - - // Returns a pointer to the ListItem stored after item in the list. - ListItem* Next(ListItem* item) const; - - // Returns a pointer to the ListItem stored before item in the list. - ListItem* Previous(ListItem* item) const; - - // Removes item from the list. - int Erase(ListItem* item); - - // Insert list item after existing_previous_item. Please note that new_item - // must be created using new ListItem(). The map will take ownership of - // new_item following a successfull insert. If insert fails new_item will - // not be released by the List - int Insert(ListItem* existing_previous_item, - ListItem* new_item); - - // Insert list item before existing_next_item. Please note that new_item - // must be created using new ListItem(). The map will take ownership of - // new_item following a successfull insert. 
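The new FileWrapper::OpenFromFileHandle() entry point above lets callers hand an already-open FILE* to the wrapper. A sketch of the intended call pattern, assuming the usual WebRTC 0-on-success convention and taking the wrapper instance as a parameter; the WrapExistingHandle helper is illustrative, not part of the patch:

#include <stdio.h>

#include "webrtc/system_wrappers/interface/file_wrapper.h"

// Wrap a FILE* that was opened elsewhere. With manage_file == true the
// wrapper takes ownership of the handle and closes it in CloseFile().
bool WrapExistingHandle(webrtc::FileWrapper* wrapper) {
  FILE* handle = fopen("log.txt", "rb");
  if (handle == NULL)
    return false;
  // read_only must match the fopen() mode ("rb" here).
  return wrapper->OpenFromFileHandle(handle,
                                     true,    // manage_file
                                     true) == 0;  // read_only
}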
If insert fails new_item will - // not be released by the List - int InsertBefore(ListItem* existing_next_item, - ListItem* new_item); - - private: - void PushBackImpl(ListItem* item); - void PushFrontImpl(ListItem* item); - - CriticalSectionWrapper* critical_section_; - ListItem* first_; - ListItem* last_; - unsigned int size_; -}; - -} // namespace webrtc - -#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/logcat_trace_context.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/logcat_trace_context.h new file mode 100644 index 000000000000..d23e451f2e15 --- /dev/null +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/logcat_trace_context.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGCAT_TRACE_CONTEXT_H_ +#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGCAT_TRACE_CONTEXT_H_ + +#include "webrtc/system_wrappers/interface/trace.h" + +#ifndef ANDROID +#error This file only makes sense to include on Android! +#endif + +namespace webrtc { + +// Scoped helper class for directing Traces to Android's logcat facility. While +// this object lives, Trace output will be sent to logcat. +class LogcatTraceContext : public webrtc::TraceCallback { + public: + LogcatTraceContext(); + virtual ~LogcatTraceContext(); + + // TraceCallback impl. + virtual void Print(TraceLevel level, const char* message, int length); +}; + +} // namespace webrtc + +#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGCAT_TRACE_CONTEXT_H_ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/logging.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/logging.h index d3b200e13bc1..41c436b1f3d0 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/logging.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/logging.h @@ -92,8 +92,6 @@ class LogMessage { ////////////////////////////////////////////////////////////////////// #ifndef LOG -#if defined(WEBRTC_LOGGING) - // The following non-obvious technique for implementation of a // conditional log stream was stolen from google3/base/logging.h. @@ -109,8 +107,16 @@ class LogMessageVoidify { void operator&(std::ostream&) { } }; +#if defined(WEBRTC_RESTRICT_LOGGING) +// This should compile away logs matching the following condition. +#define RESTRICT_LOGGING_PRECONDITION(sev) \ + sev < webrtc::LS_INFO ? (void) 0 : +#else +#define RESTRICT_LOGGING_PRECONDITION(sev) +#endif + #define LOG_SEVERITY_PRECONDITION(sev) \ - !(webrtc::LogMessage::Loggable(sev)) \ + RESTRICT_LOGGING_PRECONDITION(sev) !(webrtc::LogMessage::Loggable(sev)) \ ? (void) 0 \ : webrtc::LogMessageVoidify() & @@ -131,19 +137,6 @@ class LogMessageVoidify { #define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": " #endif -#else // !defined(WEBRTC_LOGGING) - -// Hopefully, the compiler will optimize away some of this code. -// Note: syntax of "1 ? 
(void)0 : LogMessage" was causing errors in g++, -// converted to "while (false)" -#define LOG(sev) \ - while (false)webrtc::LogMessage(NULL, 0, webrtc::sev).stream() -#define LOG_V(sev) \ - while (false) webrtc::LogMessage(NULL, 0, sev).stream() -#define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": " - -#endif // !defined(WEBRTC_LOGGING) - #define LOG_API0() LOG_F(LS_VERBOSE) #define LOG_API1(v1) LOG_API0() << #v1 << "=" << v1 #define LOG_API2(v1, v2) LOG_API1(v1) \ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/rw_lock_wrapper.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/rw_lock_wrapper.h index 80eb5da8a97d..91126e5d78e4 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/rw_lock_wrapper.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/rw_lock_wrapper.h @@ -11,35 +11,36 @@ #ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_ #define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_ +#include "webrtc/system_wrappers/interface/thread_annotations.h" + // Note, Windows pre-Vista version of RW locks are not supported natively. For // these OSs regular critical sections have been used to approximate RW lock // functionality and will therefore have worse performance. namespace webrtc { -class RWLockWrapper { +class LOCKABLE RWLockWrapper { public: static RWLockWrapper* CreateRWLock(); virtual ~RWLockWrapper() {} - virtual void AcquireLockExclusive() = 0; - virtual void ReleaseLockExclusive() = 0; + virtual void AcquireLockExclusive() EXCLUSIVE_LOCK_FUNCTION() = 0; + virtual void ReleaseLockExclusive() UNLOCK_FUNCTION() = 0; - virtual void AcquireLockShared() = 0; - virtual void ReleaseLockShared() = 0; + virtual void AcquireLockShared() SHARED_LOCK_FUNCTION() = 0; + virtual void ReleaseLockShared() UNLOCK_FUNCTION() = 0; }; // RAII extensions of the RW lock. Prevents Acquire/Release missmatches and // provides more compact locking syntax. -class ReadLockScoped { +class SCOPED_LOCKABLE ReadLockScoped { public: - ReadLockScoped(RWLockWrapper& rw_lock) - : - rw_lock_(rw_lock) { + ReadLockScoped(RWLockWrapper& rw_lock) SHARED_LOCK_FUNCTION(rw_lock) + : rw_lock_(rw_lock) { rw_lock_.AcquireLockShared(); } - ~ReadLockScoped() { + ~ReadLockScoped() UNLOCK_FUNCTION() { rw_lock_.ReleaseLockShared(); } @@ -47,15 +48,14 @@ class ReadLockScoped { RWLockWrapper& rw_lock_; }; -class WriteLockScoped { +class SCOPED_LOCKABLE WriteLockScoped { public: - WriteLockScoped(RWLockWrapper& rw_lock) - : - rw_lock_(rw_lock) { + WriteLockScoped(RWLockWrapper& rw_lock) EXCLUSIVE_LOCK_FUNCTION(rw_lock) + : rw_lock_(rw_lock) { rw_lock_.AcquireLockExclusive(); } - ~WriteLockScoped() { + ~WriteLockScoped() UNLOCK_FUNCTION() { rw_lock_.ReleaseLockExclusive(); } diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_ptr.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_ptr.h index cfaf5cbe9052..aeac77ac95bd 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_ptr.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_ptr.h @@ -1,118 +1,573 @@ -// (C) Copyright Greg Colvin and Beman Dawes 1998, 1999. -// Copyright (c) 2001, 2002 Peter Dimov -// -// Permission to copy, use, modify, sell and distribute this software -// is granted provided this copyright notice appears in all copies. -// This software is provided "as is" without express or implied -// warranty, and with no claim as to its suitability for any purpose. -// -// See http://www.boost.org/libs/smart_ptr/scoped_ptr.htm for documentation. 
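The LOCKABLE/SHARED_LOCK_FUNCTION annotations added to RWLockWrapper above exist so Clang's -Wthread-safety analysis can check callers. A minimal usage sketch of the scoped helpers; SharedCounter and value_ are illustrative names under the assumption that CreateRWLock() and the scoped classes behave as declared in the diff:

#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

class SharedCounter {
 public:
  SharedCounter() : lock_(webrtc::RWLockWrapper::CreateRWLock()), value_(0) {}

  // Many readers may hold the shared lock concurrently;
  // ~ReadLockScoped releases it on scope exit.
  int Get() const {
    webrtc::ReadLockScoped read(*lock_);
    return value_;
  }

  // Writers take the exclusive lock; ~WriteLockScoped releases it.
  void Increment() {
    webrtc::WriteLockScoped write(*lock_);
    ++value_;
  }

 private:
  webrtc::scoped_ptr<webrtc::RWLockWrapper> lock_;
  int value_;
};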
-// +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ -// scoped_ptr mimics a built-in pointer except that it guarantees deletion -// of the object pointed to, either on destruction of the scoped_ptr or via -// an explicit reset(). scoped_ptr is a simple solution for simple needs; -// use shared_ptr or std::auto_ptr if your needs are more complex. +// Borrowed from Chromium's src/base/memory/scoped_ptr.h. -// scoped_ptr_malloc added in by Google. When one of -// these goes out of scope, instead of doing a delete or delete[], it -// calls free(). scoped_ptr_malloc is likely to see much more -// use than any other specializations. +// Scopers help you manage ownership of a pointer, helping you easily manage the +// a pointer within a scope, and automatically destroying the pointer at the +// end of a scope. There are two main classes you will use, which correspond +// to the operators new/delete and new[]/delete[]. +// +// Example usage (scoped_ptr): +// { +// scoped_ptr foo(new Foo("wee")); +// } // foo goes out of scope, releasing the pointer with it. +// +// { +// scoped_ptr foo; // No pointer managed. +// foo.reset(new Foo("wee")); // Now a pointer is managed. +// foo.reset(new Foo("wee2")); // Foo("wee") was destroyed. +// foo.reset(new Foo("wee3")); // Foo("wee2") was destroyed. +// foo->Method(); // Foo::Method() called. +// foo.get()->Method(); // Foo::Method() called. +// SomeFunc(foo.release()); // SomeFunc takes ownership, foo no longer +// // manages a pointer. +// foo.reset(new Foo("wee4")); // foo manages a pointer again. +// foo.reset(); // Foo("wee4") destroyed, foo no longer +// // manages a pointer. +// } // foo wasn't managing a pointer, so nothing was destroyed. +// +// Example usage (scoped_ptr): +// { +// scoped_ptr foo(new Foo[100]); +// foo.get()->Method(); // Foo::Method on the 0th element. +// foo[10].Method(); // Foo::Method on the 10th element. +// } +// +// These scopers also implement part of the functionality of C++11 unique_ptr +// in that they are "movable but not copyable." You can use the scopers in +// the parameter and return types of functions to signify ownership transfer +// in to and out of a function. When calling a function that has a scoper +// as the argument type, it must be called with the result of an analogous +// scoper's Pass() function or another function that generates a temporary; +// passing by copy will NOT work. Here is an example using scoped_ptr: +// +// void TakesOwnership(scoped_ptr arg) { +// // Do something with arg +// } +// scoped_ptr CreateFoo() { +// // No need for calling Pass() because we are constructing a temporary +// // for the return value. +// return scoped_ptr(new Foo("new")); +// } +// scoped_ptr PassThru(scoped_ptr arg) { +// return arg.Pass(); +// } +// +// { +// scoped_ptr ptr(new Foo("yay")); // ptr manages Foo("yay"). +// TakesOwnership(ptr.Pass()); // ptr no longer owns Foo("yay"). +// scoped_ptr ptr2 = CreateFoo(); // ptr2 owns the return Foo. +// scoped_ptr ptr3 = // ptr3 now owns what was in ptr2. +// PassThru(ptr2.Pass()); // ptr2 is correspondingly NULL. 
+// } +// +// Notice that if you do not call Pass() when returning from PassThru(), or +// when invoking TakesOwnership(), the code will not compile because scopers +// are not copyable; they only implement move semantics which require calling +// the Pass() function to signify a destructive transfer of state. CreateFoo() +// is different though because we are constructing a temporary on the return +// line and thus can avoid needing to call Pass(). +// +// Pass() properly handles upcast in initialization, i.e. you can use a +// scoped_ptr to initialize a scoped_ptr: +// +// scoped_ptr foo(new Foo()); +// scoped_ptr parent(foo.Pass()); +// +// PassAs<>() should be used to upcast return value in return statement: +// +// scoped_ptr CreateFoo() { +// scoped_ptr result(new FooChild()); +// return result.PassAs(); +// } +// +// Note that PassAs<>() is implemented only for scoped_ptr, but not for +// scoped_ptr. This is because casting array pointers may not be safe. -// release() added in by Google. Use this to conditionally -// transfer ownership of a heap-allocated object to the caller, usually on -// method success. #ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_ #define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_ -#include // for assert -#include // for ptrdiff_t -#include // for free() decl +// This is an implementation designed to match the anticipated future TR2 +// implementation of the scoped_ptr class and scoped_ptr_malloc (deprecated). -#ifdef _WIN32 -namespace std { using ::ptrdiff_t; }; -#endif // _WIN32 +#include +#include +#include + +#include // For std::swap(). + +#include "webrtc/system_wrappers/interface/compile_assert.h" +#include "webrtc/system_wrappers/interface/constructor_magic.h" +#include "webrtc/system_wrappers/interface/template_util.h" +#include "webrtc/system_wrappers/source/move.h" +#include "webrtc/typedefs.h" namespace webrtc { -template -class scoped_ptr { - private: - - T* ptr; - - scoped_ptr(scoped_ptr const &); - scoped_ptr & operator=(scoped_ptr const &); - - public: - - typedef T element_type; - - explicit scoped_ptr(T* p = NULL): ptr(p) {} - - ~scoped_ptr() { - typedef char type_must_be_complete[sizeof(T)]; +// Function object which deletes its parameter, which must be a pointer. +// If C is an array type, invokes 'delete[]' on the parameter; otherwise, +// invokes 'delete'. The default deleter for scoped_ptr. +template +struct DefaultDeleter { + DefaultDeleter() {} + template DefaultDeleter(const DefaultDeleter& other) { + // IMPLEMENTATION NOTE: C++11 20.7.1.1.2p2 only provides this constructor + // if U* is implicitly convertible to T* and U is not an array type. + // + // Correct implementation should use SFINAE to disable this + // constructor. However, since there are no other 1-argument constructors, + // using a COMPILE_ASSERT() based on is_convertible<> and requiring + // complete types is simpler and will cause compile failures for equivalent + // misuses. + // + // Note, the is_convertible check also ensures that U is not an + // array. T is guaranteed to be a non-array, so any U* where U is an array + // cannot convert to T*. 
+ enum { T_must_be_complete = sizeof(T) }; + enum { U_must_be_complete = sizeof(U) }; + COMPILE_ASSERT((webrtc::is_convertible::value), + U_ptr_must_implicitly_convert_to_T_ptr); + } + inline void operator()(T* ptr) const { + enum { type_must_be_complete = sizeof(T) }; delete ptr; } - - void reset(T* p = NULL) { - typedef char type_must_be_complete[sizeof(T)]; - - if (ptr != p) { - T* obj = ptr; - ptr = p; - // Delete last, in case obj destructor indirectly results in ~scoped_ptr - delete obj; - } - } - - T& operator*() const { - assert(ptr != NULL); - return *ptr; - } - - T* operator->() const { - assert(ptr != NULL); - return ptr; - } - - T* get() const { - return ptr; - } - - void swap(scoped_ptr & b) { - T* tmp = b.ptr; - b.ptr = ptr; - ptr = tmp; - } - - T* release() { - T* tmp = ptr; - ptr = NULL; - return tmp; - } - - T** accept() { - if (ptr) { - delete ptr; - ptr = NULL; - } - return &ptr; - } - - T** use() { - return &ptr; - } }; -template inline -void swap(scoped_ptr& a, scoped_ptr& b) { - a.swap(b); +// Specialization of DefaultDeleter for array types. +template +struct DefaultDeleter { + inline void operator()(T* ptr) const { + enum { type_must_be_complete = sizeof(T) }; + delete[] ptr; + } + + private: + // Disable this operator for any U != T because it is undefined to execute + // an array delete when the static type of the array mismatches the dynamic + // type. + // + // References: + // C++98 [expr.delete]p3 + // http://cplusplus.github.com/LWG/lwg-defects.html#938 + template void operator()(U* array) const; +}; + +template +struct DefaultDeleter { + // Never allow someone to declare something like scoped_ptr. + COMPILE_ASSERT(sizeof(T) == -1, do_not_use_array_with_size_as_type); +}; + +// Function object which invokes 'free' on its parameter, which must be +// a pointer. Can be used to store malloc-allocated pointers in scoped_ptr: +// +// scoped_ptr foo_ptr( +// static_cast(malloc(sizeof(int)))); +struct FreeDeleter { + inline void operator()(void* ptr) const { + free(ptr); + } +}; + +namespace internal { + +// Minimal implementation of the core logic of scoped_ptr, suitable for +// reuse in both scoped_ptr and its specializations. +template +class scoped_ptr_impl { + public: + explicit scoped_ptr_impl(T* p) : data_(p) { } + + // Initializer for deleters that have data parameters. + scoped_ptr_impl(T* p, const D& d) : data_(p, d) {} + + // Templated constructor that destructively takes the value from another + // scoped_ptr_impl. + template + scoped_ptr_impl(scoped_ptr_impl* other) + : data_(other->release(), other->get_deleter()) { + // We do not support move-only deleters. We could modify our move + // emulation to have webrtc::subtle::move() and webrtc::subtle::forward() + // functions that are imperfect emulations of their C++11 equivalents, + // but until there's a requirement, just assume deleters are copyable. + } + + template + void TakeState(scoped_ptr_impl* other) { + // See comment in templated constructor above regarding lack of support + // for move-only deleters. + reset(other->release()); + get_deleter() = other->get_deleter(); + } + + ~scoped_ptr_impl() { + if (data_.ptr != NULL) { + // Not using get_deleter() saves one function call in non-optimized + // builds. 
+ static_cast(data_)(data_.ptr); + } + } + + void reset(T* p) { + // This is a self-reset, which is no longer allowed: http://crbug.com/162971 + if (p != NULL && p == data_.ptr) + abort(); + + // Note that running data_.ptr = p can lead to undefined behavior if + // get_deleter()(get()) deletes this. In order to pevent this, reset() + // should update the stored pointer before deleting its old value. + // + // However, changing reset() to use that behavior may cause current code to + // break in unexpected ways. If the destruction of the owned object + // dereferences the scoped_ptr when it is destroyed by a call to reset(), + // then it will incorrectly dispatch calls to |p| rather than the original + // value of |data_.ptr|. + // + // During the transition period, set the stored pointer to NULL while + // deleting the object. Eventually, this safety check will be removed to + // prevent the scenario initially described from occuring and + // http://crbug.com/176091 can be closed. + T* old = data_.ptr; + data_.ptr = NULL; + if (old != NULL) + static_cast(data_)(old); + data_.ptr = p; + } + + T* get() const { return data_.ptr; } + + D& get_deleter() { return data_; } + const D& get_deleter() const { return data_; } + + void swap(scoped_ptr_impl& p2) { + // Standard swap idiom: 'using std::swap' ensures that std::swap is + // present in the overload set, but we call swap unqualified so that + // any more-specific overloads can be used, if available. + using std::swap; + swap(static_cast(data_), static_cast(p2.data_)); + swap(data_.ptr, p2.data_.ptr); + } + + T* release() { + T* old_ptr = data_.ptr; + data_.ptr = NULL; + return old_ptr; + } + + private: + // Needed to allow type-converting constructor. + template friend class scoped_ptr_impl; + + // Use the empty base class optimization to allow us to have a D + // member, while avoiding any space overhead for it when D is an + // empty class. See e.g. http://www.cantrip.org/emptyopt.html for a good + // discussion of this technique. + struct Data : public D { + explicit Data(T* ptr_in) : ptr(ptr_in) {} + Data(T* ptr_in, const D& other) : D(other), ptr(ptr_in) {} + T* ptr; + }; + + Data data_; + + DISALLOW_COPY_AND_ASSIGN(scoped_ptr_impl); +}; + +} // namespace internal + +// A scoped_ptr is like a T*, except that the destructor of scoped_ptr +// automatically deletes the pointer it holds (if any). +// That is, scoped_ptr owns the T object that it points to. +// Like a T*, a scoped_ptr may hold either NULL or a pointer to a T object. +// Also like T*, scoped_ptr is thread-compatible, and once you +// dereference it, you get the thread safety guarantees of T. +// +// The size of scoped_ptr is small. On most compilers, when using the +// DefaultDeleter, sizeof(scoped_ptr) == sizeof(T*). Custom deleters will +// increase the size proportional to whatever state they need to have. See +// comments inside scoped_ptr_impl<> for details. +// +// Current implementation targets having a strict subset of C++11's +// unique_ptr<> features. Known deficiencies include not supporting move-only +// deleteres, function pointers as deleters, and deleters with reference +// types. +template > +class scoped_ptr { + WEBRTC_MOVE_ONLY_TYPE_FOR_CPP_03(scoped_ptr, RValue) + + public: + // The element and deleter types. + typedef T element_type; + typedef D deleter_type; + + // Constructor. Defaults to initializing with NULL. + scoped_ptr() : impl_(NULL) { } + + // Constructor. Takes ownership of p. 
+  explicit scoped_ptr(element_type* p) : impl_(p) { }
+
+  // Constructor.  Allows initialization of a stateful deleter.
+  scoped_ptr(element_type* p, const D& d) : impl_(p, d) { }
+
+  // Constructor.  Allows construction from a scoped_ptr rvalue for a
+  // convertible type and deleter.
+  //
+  // IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this constructor distinct
+  // from the normal move constructor. By C++11 20.7.1.2.1.21, this constructor
+  // has different post-conditions if D is a reference type. Since this
+  // implementation does not support deleters with reference type,
+  // we do not need a separate move constructor allowing us to avoid one
+  // use of SFINAE. You only need to care about this if you modify the
+  // implementation of scoped_ptr.
+  template <typename U, typename V>
+  scoped_ptr(scoped_ptr<U, V> other) : impl_(&other.impl_) {
+    COMPILE_ASSERT(!webrtc::is_array<U>::value, U_cannot_be_an_array);
+  }
+
+  // Constructor.  Move constructor for C++03 move emulation of this type.
+  scoped_ptr(RValue rvalue) : impl_(&rvalue.object->impl_) { }
+
+  // operator=.  Allows assignment from a scoped_ptr rvalue for a convertible
+  // type and deleter.
+  //
+  // IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this operator= distinct from
+  // the normal move assignment operator. By C++11 20.7.1.2.3.4, this templated
+  // form has different requirements on for move-only Deleters. Since this
+  // implementation does not support move-only Deleters, we do not need a
+  // separate move assignment operator allowing us to avoid one use of SFINAE.
+  // You only need to care about this if you modify the implementation of
+  // scoped_ptr.
+  template <typename U, typename V>
+  scoped_ptr& operator=(scoped_ptr<U, V> rhs) {
+    COMPILE_ASSERT(!webrtc::is_array<U>::value, U_cannot_be_an_array);
+    impl_.TakeState(&rhs.impl_);
+    return *this;
+  }
+
+  // Reset.  Deletes the currently owned object, if any.
+  // Then takes ownership of a new object, if given.
+  void reset(element_type* p = NULL) { impl_.reset(p); }
+
+  // Accessors to get the owned object.
+  // operator* and operator-> will assert() if there is no current object.
+  element_type& operator*() const {
+    assert(impl_.get() != NULL);
+    return *impl_.get();
+  }
+  element_type* operator->() const {
+    assert(impl_.get() != NULL);
+    return impl_.get();
+  }
+  element_type* get() const { return impl_.get(); }
+
+  // Access to the deleter.
+  deleter_type& get_deleter() { return impl_.get_deleter(); }
+  const deleter_type& get_deleter() const { return impl_.get_deleter(); }
+
+  // Allow scoped_ptr<element_type> to be used in boolean expressions, but not
+  // implicitly convertible to a real bool (which is dangerous).
+  //
+  // Note that this trick is only safe when the == and != operators
+  // are declared explicitly, as otherwise "scoped_ptr1 ==
+  // scoped_ptr2" will compile but do the wrong thing (i.e., convert
+  // to Testable and then do the comparison).
+ private:
+  typedef webrtc::internal::scoped_ptr_impl<element_type, deleter_type>
+      scoped_ptr::*Testable;
+
+ public:
+  operator Testable() const { return impl_.get() ? &scoped_ptr::impl_ : NULL; }
+
+  // Comparison operators.
+  // These return whether two scoped_ptr refer to the same object, not just to
+  // two different but equal objects.
+  bool operator==(const element_type* p) const { return impl_.get() == p; }
+  bool operator!=(const element_type* p) const { return impl_.get() != p; }
+
+  // Swap two scoped pointers.
+  void swap(scoped_ptr& p2) {
+    impl_.swap(p2.impl_);
+  }
+
+  // Release a pointer.
+  // The return value is the current pointer held by this object.
+  // If this object holds a NULL pointer, the return value is NULL.
+  // After this operation, this object will hold a NULL pointer,
+  // and will not own the object any more.
+  element_type* release() WARN_UNUSED_RESULT {
+    return impl_.release();
+  }
+
+  // C++98 doesn't support functions templates with default parameters which
+  // makes it hard to write a PassAs() that understands converting the deleter
+  // while preserving simple calling semantics.
+  //
+  // Until there is a use case for PassAs() with custom deleters, just ignore
+  // the custom deleter.
+  template <typename PassAsType>
+  scoped_ptr<PassAsType> PassAs() {
+    return scoped_ptr<PassAsType>(Pass());
+  }
+
+ private:
+  // Needed to reach into |impl_| in the constructor.
+  template <typename U, typename V> friend class scoped_ptr;
+  webrtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
+
+  // Forbidden for API compatibility with std::unique_ptr.
+  explicit scoped_ptr(int disallow_construction_from_null);
+
+  // Forbid comparison of scoped_ptr types.  If U != T, it totally
+  // doesn't make sense, and if U == T, it still doesn't make sense
+  // because you should never have the same object owned by two different
+  // scoped_ptrs.
+  template <class U> bool operator==(scoped_ptr<U> const& p2) const;
+  template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
+};
+
+template <class T, class D>
+class scoped_ptr<T[], D> {
+  WEBRTC_MOVE_ONLY_TYPE_FOR_CPP_03(scoped_ptr, RValue)
+
+ public:
+  // The element and deleter types.
+  typedef T element_type;
+  typedef D deleter_type;
+
+  // Constructor.  Defaults to initializing with NULL.
+  scoped_ptr() : impl_(NULL) { }
+
+  // Constructor. Stores the given array. Note that the argument's type
+  // must exactly match T*. In particular:
+  // - it cannot be a pointer to a type derived from T, because it is
+  //   inherently unsafe in the general case to access an array through a
+  //   pointer whose dynamic type does not match its static type (eg., if
+  //   T and the derived types had different sizes access would be
+  //   incorrectly calculated). Deletion is also always undefined
+  //   (C++98 [expr.delete]p3). If you're doing this, fix your code.
+  // - it cannot be NULL, because NULL is an integral expression, not a
+  //   pointer to T. Use the no-argument version instead of explicitly
+  //   passing NULL.
+  // - it cannot be const-qualified differently from T per unique_ptr spec
+  //   (http://cplusplus.github.com/LWG/lwg-active.html#2118). Users wanting
+  //   to work around this may use implicit_cast<const T*>().
+  //   However, because of the first bullet in this comment, users MUST
+  //   NOT use implicit_cast<Base*>() to upcast the static type of the array.
+  explicit scoped_ptr(element_type* array) : impl_(array) { }
+
+  // Constructor.  Move constructor for C++03 move emulation of this type.
+  scoped_ptr(RValue rvalue) : impl_(&rvalue.object->impl_) { }
+
+  // operator=.  Move operator= for C++03 move emulation of this type.
+  scoped_ptr& operator=(RValue rhs) {
+    impl_.TakeState(&rhs.object->impl_);
+    return *this;
+  }
+
+  // Reset.  Deletes the currently owned array, if any.
+  // Then takes ownership of a new object, if given.
+  void reset(element_type* array = NULL) { impl_.reset(array); }
+
+  // Accessors to get the owned array.
+  element_type& operator[](size_t i) const {
+    assert(impl_.get() != NULL);
+    return impl_.get()[i];
+  }
+  element_type* get() const { return impl_.get(); }
+
+  // Access to the deleter.
+  deleter_type& get_deleter() { return impl_.get_deleter(); }
+  const deleter_type& get_deleter() const { return impl_.get_deleter(); }
+
+  // Allow scoped_ptr<element_type> to be used in boolean expressions, but not
+  // implicitly convertible to a real bool (which is dangerous).
+ private:
+  typedef webrtc::internal::scoped_ptr_impl<element_type, deleter_type>
+      scoped_ptr::*Testable;
+
+ public:
+  operator Testable() const { return impl_.get() ? &scoped_ptr::impl_ : NULL; }
+
+  // Comparison operators.
+  // These return whether two scoped_ptr refer to the same object, not just to
+  // two different but equal objects.
+  bool operator==(element_type* array) const { return impl_.get() == array; }
+  bool operator!=(element_type* array) const { return impl_.get() != array; }
+
+  // Swap two scoped pointers.
+  void swap(scoped_ptr& p2) {
+    impl_.swap(p2.impl_);
+  }
+
+  // Release a pointer.
+  // The return value is the current pointer held by this object.
+  // If this object holds a NULL pointer, the return value is NULL.
+  // After this operation, this object will hold a NULL pointer,
+  // and will not own the object any more.
+  element_type* release() WARN_UNUSED_RESULT {
+    return impl_.release();
+  }
+
+ private:
+  // Force element_type to be a complete type.
+  enum { type_must_be_complete = sizeof(element_type) };
+
+  // Actually hold the data.
+  webrtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
+
+  // Disable initialization from any type other than element_type*, by
+  // providing a constructor that matches such an initialization, but is
+  // private and has no definition. This is disabled because it is not safe to
+  // call delete[] on an array whose static type does not match its dynamic
+  // type.
+  template <typename U> explicit scoped_ptr(U* array);
+  explicit scoped_ptr(int disallow_construction_from_null);
+
+  // Disable reset() from any type other than element_type*, for the same
+  // reasons as the constructor above.
+  template <typename U> void reset(U* array);
+  void reset(int disallow_reset_from_null);
+
+  // Forbid comparison of scoped_ptr types.  If U != T, it totally
+  // doesn't make sense, and if U == T, it still doesn't make sense
+  // because you should never have the same object owned by two different
+  // scoped_ptrs.
+  template <class U> bool operator==(scoped_ptr<U> const& p2) const;
+  template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
+};
+
+}  // namespace webrtc
+
+// Free functions
+template <class T, class D>
+void swap(webrtc::scoped_ptr<T, D>& p1, webrtc::scoped_ptr<T, D>& p2) {
+  p1.swap(p2);
+}
+
+template <class T, class D>
+bool operator==(T* p1, const webrtc::scoped_ptr<T, D>& p2) {
+  return p1 == p2.get();
+}
+
+template <class T, class D>
+bool operator!=(T* p1, const webrtc::scoped_ptr<T, D>& p2) {
+  return p1 != p2.get();
+}
+
+namespace webrtc {
+
+// DEPRECATED: Use scoped_ptr instead.
+// TODO(ajm): Remove scoped_array.
+//
 // scoped_array extends scoped_ptr to arrays. Deletion of the array pointed to
 // is guaranteed, either on destruction of the scoped_array or via an explicit
 // reset(). Use shared_array or std::vector if your needs are more complex.
@@ -184,6 +639,9 @@ void swap(scoped_array<C>& a, scoped_array<C>& b) {
   a.swap(b);
 }
 
+// DEPRECATED: Use scoped_ptr instead.
+// TODO(ajm): Remove scoped_ptr_malloc.
+//
 // scoped_ptr_malloc<> is similar to scoped_ptr<>, but it accepts a
 // second template argument, the function used to free the object.
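scoped_ptr's private Testable typedef above is the classic C++03 "safe bool" idiom: the conversion target is a pointer-to-member type, so the object can be tested in an if-statement yet never silently converts to int or compares against an unrelated type. A minimal, self-contained sketch of the trick follows; Handle and its members are illustrative names, not part of this patch.

#include <cstdio>

class Handle {
 public:
  explicit Handle(int* p) : ptr_(p) {}

 private:
  typedef int* Handle::*Testable;  // pointer-to-member type, not a real bool

 public:
  // Non-null objects convert to a non-null member pointer; null objects to 0.
  operator Testable() const { return ptr_ ? &Handle::ptr_ : 0; }

 private:
  int* ptr_;
};

int main() {
  int x = 42;
  Handle good(&x), bad(0);
  if (good) std::printf("good is set\n");   // conversion to Testable fires
  if (!bad) std::printf("bad is empty\n");  // works in boolean contexts
  // int n = good;  // does not compile: no conversion to int exists
  return 0;
}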
@@ -254,4 +712,4 @@ void swap(scoped_ptr_malloc<C, FP>& a, scoped_ptr_malloc<C, FP>& b) {
 
 }  // namespace webrtc
 
-#endif  // #ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_refptr.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_refptr.h
index a8a0074a2db0..b344d211b1b0 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_refptr.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/scoped_refptr.h
@@ -11,6 +11,8 @@
 #ifndef SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
 #define SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
 
+#include
+
 namespace webrtc {
 
 // Extracted from Chromium's src/base/memory/ref_counted.h.
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/template_util.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/template_util.h
new file mode 100644
index 000000000000..5ae415b5a91d
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/template_util.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/template_util.h.
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TEMPLATE_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TEMPLATE_UTIL_H_
+
+#include <cstddef>  // For size_t.
+
+namespace webrtc {
+
+// Template definitions from tr1.
+
+template<class T, T v>
+struct integral_constant {
+  static const T value = v;
+  typedef T value_type;
+  typedef integral_constant<T, v> type;
+};
+
+template <class T, T v> const T integral_constant<T, v>::value;
+
+typedef integral_constant<bool, true> true_type;
+typedef integral_constant<bool, false> false_type;
+
+template <class T> struct is_pointer : false_type {};
+template <class T> struct is_pointer<T*> : true_type {};
+
+template <class T, class U> struct is_same : public false_type {};
+template <class T> struct is_same<T, T> : true_type {};
+
+template<class> struct is_array : public false_type {};
+template<class T, size_t n> struct is_array<T[n]> : public true_type {};
+template<class T> struct is_array<T[]> : public true_type {};
+
+template <class T> struct is_non_const_reference : false_type {};
+template <class T> struct is_non_const_reference<T&> : true_type {};
+template <class T> struct is_non_const_reference<const T&> : false_type {};
+
+template <class T> struct is_void : false_type {};
+template <> struct is_void<void> : true_type {};
+
+namespace internal {
+
+// Types YesType and NoType are guaranteed such that sizeof(YesType) <
+// sizeof(NoType).
+typedef char YesType;
+
+struct NoType {
+  YesType dummy[2];
+};
+
+// This class is an implementation detail for is_convertible, and you
+// don't need to know how it works to use is_convertible. For those
+// who care: we declare two different functions, one whose argument is
+// of type To and one with a variadic argument list. We give them
+// return types of different size, so we can use sizeof to trick the
+// compiler into telling us which function it would have chosen if we
+// had called it with an argument of type From.  See Alexandrescu's
+// _Modern C++ Design_ for more details on this sort of trick.
+
+struct ConvertHelper {
+  template <typename To>
+  static YesType Test(To);
+
+  template <typename To>
+  static NoType Test(...);
+
+  template <typename From>
+  static From& Create();
+};
+
+// Used to determine if a type is a struct/union/class. Inspired by Boost's
+// is_class type_trait implementation.
+struct IsClassHelper {
+  template <typename C>
+  static YesType Test(void(C::*)(void));
+
+  template <typename C>
+  static NoType Test(...);
+};
+
+}  // namespace internal
+
+// Inherits from true_type if From is convertible to To, false_type otherwise.
+//
+// Note that if the type is convertible, this will be a true_type REGARDLESS
+// of whether or not the conversion would emit a warning.
+template <typename From, typename To>
+struct is_convertible
+    : integral_constant<bool,
+                        sizeof(internal::ConvertHelper::Test<To>(
+                                   internal::ConvertHelper::Create<From>())) ==
+                        sizeof(internal::YesType)> {
+};
+
+template <typename T>
+struct is_class
+    : integral_constant<bool,
+                        sizeof(internal::IsClassHelper::Test<T>(0)) ==
+                        sizeof(internal::YesType)> {
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TEMPLATE_UTIL_H_
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/thread_annotations.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/thread_annotations.h
new file mode 100644
index 000000000000..612242d611fc
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/thread_annotations.h
@@ -0,0 +1,99 @@
+//
+//  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+//
+//  Use of this source code is governed by a BSD-style license
+//  that can be found in the LICENSE file in the root of the source
+//  tree. An additional intellectual property rights grant can be found
+//  in the file PATENTS.  All contributing project authors may
+//  be found in the AUTHORS file in the root of the source tree.
+//
+// Borrowed from
+// https://code.google.com/p/gperftools/source/browse/src/base/thread_annotations.h
+// but adapted for clang attributes instead of the gcc.
+//
+// This header file contains the macro definitions for thread safety
+// annotations that allow the developers to document the locking policies
+// of their multi-threaded code. The annotations can also help program
+// analysis tools to identify potential thread safety issues.
+
+#ifndef BASE_THREAD_ANNOTATIONS_H_
+#define BASE_THREAD_ANNOTATIONS_H_
+
+#if defined(__clang__) && (!defined(SWIG))
+#define THREAD_ANNOTATION_ATTRIBUTE__(x) __attribute__((x))
+#else
+#define THREAD_ANNOTATION_ATTRIBUTE__(x) // no-op
+#endif
+
+// Document if a shared variable/field needs to be protected by a lock.
+// GUARDED_BY allows the user to specify a particular lock that should be
+// held when accessing the annotated variable, while GUARDED_VAR only
+// indicates a shared variable should be guarded (by any lock). GUARDED_VAR
+// is primarily used when the client cannot express the name of the lock.
+#define GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(guarded_by(x))
+#define GUARDED_VAR THREAD_ANNOTATION_ATTRIBUTE__(guarded)
+
+// Document if the memory location pointed to by a pointer should be guarded
+// by a lock when dereferencing the pointer. Similar to GUARDED_VAR,
+// PT_GUARDED_VAR is primarily used when the client cannot express the name
+// of the lock. Note that a pointer variable to a shared memory location
+// could itself be a shared variable. For example, if a shared global pointer
+// q, which is guarded by mu1, points to a shared memory location that is
+// guarded by mu2, q should be annotated as follows:
+//     int *q GUARDED_BY(mu1) PT_GUARDED_BY(mu2);
+#define PT_GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(point_to_guarded_by(x))
+#define PT_GUARDED_VAR THREAD_ANNOTATION_ATTRIBUTE__(point_to_guarded)
+
+// Document the acquisition order between locks that can be held
+// simultaneously by a thread.
For any two locks that need to be annotated +// to establish an acquisition order, only one of them needs the annotation. +// (i.e. You don't have to annotate both locks with both ACQUIRED_AFTER +// and ACQUIRED_BEFORE.) +#define ACQUIRED_AFTER(x) THREAD_ANNOTATION_ATTRIBUTE__(acquired_after(x)) +#define ACQUIRED_BEFORE(x) THREAD_ANNOTATION_ATTRIBUTE__(acquired_before(x)) + +// The following three annotations document the lock requirements for +// functions/methods. + +// Document if a function expects certain locks to be held before it is called +#define EXCLUSIVE_LOCKS_REQUIRED(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(__VA_ARGS__)) + +#define SHARED_LOCKS_REQUIRED(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(shared_locks_required(__VA_ARGS__)) + +// Document the locks acquired in the body of the function. These locks +// cannot be held when calling this function (as google3's Mutex locks are +// non-reentrant). +#define LOCKS_EXCLUDED(x) THREAD_ANNOTATION_ATTRIBUTE__(locks_excluded(x)) + +// Document the lock the annotated function returns without acquiring it. +#define LOCK_RETURNED(x) THREAD_ANNOTATION_ATTRIBUTE__(lock_returned(x)) + +// Document if a class/type is a lockable type (such as the Mutex class). +#define LOCKABLE THREAD_ANNOTATION_ATTRIBUTE__(lockable) + +// Document if a class is a scoped lockable type (such as the MutexLock class). +#define SCOPED_LOCKABLE THREAD_ANNOTATION_ATTRIBUTE__(scoped_lockable) + +// The following annotations specify lock and unlock primitives. +#define EXCLUSIVE_LOCK_FUNCTION(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(exclusive_lock_function(__VA_ARGS__)) + +#define SHARED_LOCK_FUNCTION(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(shared_lock_function(__VA_ARGS__)) + +#define EXCLUSIVE_TRYLOCK_FUNCTION(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(exclusive_trylock_function(__VA_ARGS__)) + +#define SHARED_TRYLOCK_FUNCTION(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(shared_trylock_function(__VA_ARGS__)) + +#define UNLOCK_FUNCTION(...) \ + THREAD_ANNOTATION_ATTRIBUTE__(unlock_function(__VA_ARGS__)) + +// An escape hatch for thread safety analysis to ignore the annotated function. +#define NO_THREAD_SAFETY_ANALYSIS \ + THREAD_ANNOTATION_ATTRIBUTE__(no_thread_safety_analysis) + +#endif // BASE_THREAD_ANNOTATIONS_H_ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/trace.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/trace.h index 85e02658ead6..44ea658bdf28 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/trace.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/trace.h @@ -19,14 +19,15 @@ #include "webrtc/common_types.h" #include "webrtc/typedefs.h" -#if !defined(WEBRTC_LOGGING) -#define WEBRTC_TRACE (true) ? (void)0 : Trace::Add +namespace webrtc { + +#if defined(WEBRTC_RESTRICT_LOGGING) +// Disable all TRACE macros. The LOG macro is still functional. +#define WEBRTC_TRACE true ? (void) 0 : Trace::Add #else #define WEBRTC_TRACE Trace::Add #endif -namespace webrtc { - class Trace { public: // The length of the trace text preceeding the log message. 
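The reworked WEBRTC_TRACE above uses the conditional-operator trick to compile tracing out: the argument list still parses and type-checks, but the call sits in the discarded branch of `true ? (void) 0 : ...`. A self-contained sketch of the same technique; the one-argument Trace::Add here is a stand-in, not the real WebRTC signature.

#include <cstdio>

struct Trace {
  static void Add(const char* msg) { std::printf("%s\n", msg); }
};

// Mirrors the two expansions of WEBRTC_TRACE in the hunk above.
#define TRACE_ON Trace::Add
#define TRACE_OFF true ? (void) 0 : Trace::Add

int main() {
  TRACE_ON("logged");    // expands to Trace::Add("logged")
  TRACE_OFF("dropped");  // dead branch: argument type-checked, call removed
  return 0;
}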
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_win.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_win.cc index 5dd070929a6a..7c70376fa8d2 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_win.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_win.cc @@ -20,9 +20,8 @@ namespace webrtc { Atomic32::Atomic32(int32_t initial_value) : value_(initial_value) { - // Make sure that the counter variable we're using is of the same size - // as what the API expects. - COMPILE_ASSERT(sizeof(value_) == sizeof(LONG)); + COMPILE_ASSERT(sizeof(value_) == sizeof(LONG), + counter_variable_is_the_expected_size); assert(Is32bitAligned()); } diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_unittest.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_unittest.cc index 48057fc086d3..0d287b71b8d4 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_unittest.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_unittest.cc @@ -175,10 +175,6 @@ class CondVarTest : public ::testing::Test { ThreadWrapper* thread_; }; -// Disable for TSan v2, see -// https://code.google.com/p/webrtc/issues/detail?id=2259 for details. -#if !defined(THREAD_SANITIZER) - // The SetUp and TearDown functions use condition variables. // This test verifies those pieces in isolation. TEST_F(CondVarTest, InitFunctionsWork) { @@ -195,8 +191,6 @@ TEST_F(CondVarTest, PassBatonMultipleTimes) { EXPECT_EQ(2 * kNumberOfRounds, baton_.PassCount()); } -#endif // if !defined(THREAD_SANITIZER) - } // anonymous namespace } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features_android.c b/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features_android.c index 9b44d02846a6..79e29de3facb 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features_android.c +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features_android.c @@ -8,11 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
 */
 
-#if defined(WEBRTC_CHROMIUM_BUILD)
-#include <cpu-features.h>
-#else
-#include "webrtc/system_wrappers/source/android/cpu-features.h"
-#endif  // defined(WEBRTC_CHROMIUM_BUILD)
+#include "webrtc/system_wrappers/source/droid-cpu-features.h"
 
 uint64_t WebRtc_GetCPUFeaturesARM(void) {
   return android_getCpuFeatures();
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/critical_section_unittest.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/critical_section_unittest.cc
index 40df570e9a24..5c416b2de395 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/critical_section_unittest.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/critical_section_unittest.cc
@@ -81,7 +81,7 @@ bool LockUnlockThenStopRunFunction(void* obj) {
   return false;
 }
 
-TEST_F(CritSectTest, ThreadWakesOnce) {
+TEST_F(CritSectTest, ThreadWakesOnce) NO_THREAD_SAFETY_ANALYSIS {
   CriticalSectionWrapper* crit_sect =
       CriticalSectionWrapper::CreateCriticalSection();
   ProtectedCount count(crit_sect);
@@ -110,7 +110,7 @@ bool LockUnlockRunFunction(void* obj) {
   return true;
 }
 
-TEST_F(CritSectTest, ThreadWakesTwice) {
+TEST_F(CritSectTest, ThreadWakesTwice) NO_THREAD_SAFETY_ANALYSIS {
   CriticalSectionWrapper* crit_sect =
       CriticalSectionWrapper::CreateCriticalSection();
   ProtectedCount count(crit_sect);
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/android/cpu-features.c b/media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.c
similarity index 99%
rename from media/webrtc/trunk/webrtc/system_wrappers/source/android/cpu-features.c
rename to media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.c
index efe7b95d6ab1..4d6aef82ebc8 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/android/cpu-features.c
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.c
@@ -18,7 +18,7 @@
 #include
 #include
-#include "webrtc/system_wrappers/source/android/cpu-features.h"
+#include "webrtc/system_wrappers/source/droid-cpu-features.h"
 
 static pthread_once_t g_once;
 static AndroidCpuFamily g_cpuFamily;
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/android/cpu-features.h b/media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.h
similarity index 100%
rename from media/webrtc/trunk/webrtc/system_wrappers/source/android/cpu-features.h
rename to media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.h
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/event_win.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/event_win.cc
index d2032131d081..398e714a7596 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/event_win.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/event_win.cc
@@ -23,16 +23,17 @@ EventWindows::EventWindows()
 }
 
 EventWindows::~EventWindows() {
+  StopTimer();
   CloseHandle(event_);
 }
 
 bool EventWindows::Set() {
   // Note: setting an event that is already set has no effect.
-  return SetEvent(event_) == 1 ? true : false;
+  return SetEvent(event_) == 1;
 }
 
 bool EventWindows::Reset() {
-  return ResetEvent(event_) == 1 ?
true : false; + return ResetEvent(event_) == 1; } EventTypeWrapper EventWindows::Wait(unsigned long max_time) { @@ -52,6 +53,7 @@ bool EventWindows::StartTimer(bool periodic, unsigned long time) { timeKillEvent(timerID_); timerID_ = NULL; } + if (periodic) { timerID_ = timeSetEvent(time, 0, (LPTIMECALLBACK)HANDLE(event_), 0, TIME_PERIODIC | TIME_CALLBACK_EVENT_PULSE); @@ -60,15 +62,15 @@ bool EventWindows::StartTimer(bool periodic, unsigned long time) { TIME_ONESHOT | TIME_CALLBACK_EVENT_SET); } - if (timerID_ == NULL) { - return false; - } - return true; + return timerID_ != NULL; } bool EventWindows::StopTimer() { - timeKillEvent(timerID_); - timerID_ = NULL; + if (timerID_ != NULL) { + timeKillEvent(timerID_); + timerID_ = NULL; + } + return true; } diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.cc index 190f399552ba..8b21b96eb45c 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.cc @@ -30,6 +30,7 @@ FileWrapper* FileWrapper::Create() { FileWrapperImpl::FileWrapperImpl() : rw_lock_(RWLockWrapper::CreateRWLock()), id_(NULL), + managed_file_handle_(true), open_(false), looping_(false), read_only_(false), @@ -39,7 +40,7 @@ FileWrapperImpl::FileWrapperImpl() } FileWrapperImpl::~FileWrapperImpl() { - if (id_ != NULL) { + if (id_ != NULL && managed_file_handle_) { fclose(id_); } } @@ -71,8 +72,7 @@ int FileWrapperImpl::Flush() { return FlushImpl(); } -int FileWrapperImpl::FileName(char* file_name_utf8, - size_t size) const { +int FileWrapperImpl::FileName(char* file_name_utf8, size_t size) const { ReadLockScoped read(*rw_lock_); size_t length = strlen(file_name_utf8_); if (length > kMaxFileNameSize) { @@ -100,6 +100,8 @@ bool FileWrapperImpl::Open() const { int FileWrapperImpl::OpenFile(const char* file_name_utf8, bool read_only, bool loop, bool text) { WriteLockScoped write(*rw_lock_); + if (id_ != NULL && !managed_file_handle_) + return -1; size_t length = strlen(file_name_utf8); if (length > kMaxFileNameSize - 1) { return -1; @@ -154,6 +156,7 @@ int FileWrapperImpl::OpenFile(const char* file_name_utf8, bool read_only, fclose(id_); } id_ = tmp_id; + managed_file_handle_ = true; looping_ = loop; open_ = true; return 0; @@ -161,6 +164,29 @@ int FileWrapperImpl::OpenFile(const char* file_name_utf8, bool read_only, return -1; } +int FileWrapperImpl::OpenFromFileHandle(FILE* handle, + bool manage_file, + bool read_only, + bool loop) { + WriteLockScoped write(*rw_lock_); + if (!handle) + return -1; + + if (id_ != NULL) { + if (managed_file_handle_) + fclose(id_); + else + return -1; + } + + id_ = handle; + managed_file_handle_ = manage_file; + read_only_ = read_only; + looping_ = loop; + open_ = true; + return 0; +} + int FileWrapperImpl::Read(void* buf, int length) { WriteLockScoped write(*rw_lock_); if (length < 0) @@ -233,7 +259,8 @@ bool FileWrapperImpl::Write(const void* buf, int length) { int FileWrapperImpl::CloseFileImpl() { if (id_ != NULL) { - fclose(id_); + if (managed_file_handle_) + fclose(id_); id_ = NULL; } memset(file_name_utf8_, 0, kMaxFileNameSize); diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.h b/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.h index 8cc0979c60a7..1abf01019407 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/file_impl.h @@ -35,6 +35,11 @@ class FileWrapperImpl : public 
FileWrapper { bool loop = false, bool text = false) OVERRIDE; + virtual int OpenFromFileHandle(FILE* handle, + bool manage_file, + bool read_only, + bool loop = false) OVERRIDE; + virtual int CloseFile() OVERRIDE; virtual int SetMaxFileSize(size_t bytes) OVERRIDE; virtual int Flush() OVERRIDE; @@ -51,6 +56,7 @@ class FileWrapperImpl : public FileWrapper { scoped_ptr rw_lock_; FILE* id_; + bool managed_file_handle_; bool open_; bool looping_; bool read_only_; diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/list_no_stl.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/list_no_stl.cc deleted file mode 100644 index 5c9f5af01ce7..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/list_no_stl.cc +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/system_wrappers/interface/list_wrapper.h" - -#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" -#include "webrtc/system_wrappers/interface/trace.h" - -namespace webrtc { - -ListItem::ListItem(const void* item) - : next_(0), - prev_(0), - item_ptr_(item), - item_(0) { -} - -ListItem::ListItem(const unsigned int item) - : next_(0), - prev_(0), - item_ptr_(0), - item_(item) { -} - -ListItem::~ListItem() { -} - -void* ListItem::GetItem() const { - return const_cast(item_ptr_); -} - -unsigned int ListItem::GetUnsignedItem() const { - return item_; -} - -ListWrapper::ListWrapper() - : critical_section_(CriticalSectionWrapper::CreateCriticalSection()), - first_(0), - last_(0), - size_(0) { -} - -ListWrapper::~ListWrapper() { - if (!Empty()) { - // TODO(hellner) I'm not sure this loggin is useful. - WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, - "Potential memory leak in ListWrapper"); - // Remove all remaining list items. 
- while (Erase(First()) == 0) - {} - } - delete critical_section_; -} - -bool ListWrapper::Empty() const { - return !first_ && !last_; -} - -unsigned int ListWrapper::GetSize() const { - return size_; -} - -int ListWrapper::PushBack(const void* ptr) { - ListItem* item = new ListItem(ptr); - CriticalSectionScoped lock(critical_section_); - PushBackImpl(item); - return 0; -} - -int ListWrapper::PushBack(const unsigned int item_id) { - ListItem* item = new ListItem(item_id); - CriticalSectionScoped lock(critical_section_); - PushBackImpl(item); - return 0; -} - -int ListWrapper::PushFront(const unsigned int item_id) { - ListItem* item = new ListItem(item_id); - CriticalSectionScoped lock(critical_section_); - PushFrontImpl(item); - return 0; -} - -int ListWrapper::PushFront(const void* ptr) { - ListItem* item = new ListItem(ptr); - CriticalSectionScoped lock(critical_section_); - PushFrontImpl(item); - return 0; -} - -int ListWrapper::PopFront() { - return Erase(first_); -} - -int ListWrapper::PopBack() { - return Erase(last_); -} - -ListItem* ListWrapper::First() const { - return first_; -} - -ListItem* ListWrapper::Last() const { - return last_; -} - -ListItem* ListWrapper::Next(ListItem* item) const { - if (!item) { - return 0; - } - return item->next_; -} - -ListItem* ListWrapper::Previous(ListItem* item) const { - if (!item) { - return 0; - } - return item->prev_; -} - -int ListWrapper::Insert(ListItem* existing_previous_item, ListItem* new_item) { - if (!new_item) { - return -1; - } - // Allow existing_previous_item to be NULL if the list is empty. - // TODO(hellner) why allow this? Keep it as is for now to avoid - // breaking API contract. - if (!existing_previous_item && !Empty()) { - return -1; - } - CriticalSectionScoped lock(critical_section_); - if (!existing_previous_item) { - PushBackImpl(new_item); - return 0; - } - ListItem* next_item = existing_previous_item->next_; - new_item->next_ = existing_previous_item->next_; - new_item->prev_ = existing_previous_item; - existing_previous_item->next_ = new_item; - if (next_item) { - next_item->prev_ = new_item; - } else { - last_ = new_item; - } - size_++; - return 0; -} - -int ListWrapper::InsertBefore(ListItem* existing_next_item, - ListItem* new_item) { - if (!new_item) { - return -1; - } - // Allow existing_next_item to be NULL if the list is empty. - // Todo: why allow this? Keep it as is for now to avoid breaking API - // contract. 
- if (!existing_next_item && !Empty()) { - return -1; - } - CriticalSectionScoped lock(critical_section_); - if (!existing_next_item) { - PushBackImpl(new_item); - return 0; - } - - ListItem* previous_item = existing_next_item->prev_; - new_item->next_ = existing_next_item; - new_item->prev_ = previous_item; - existing_next_item->prev_ = new_item; - if (previous_item) { - previous_item->next_ = new_item; - } else { - first_ = new_item; - } - size_++; - return 0; -} - -int ListWrapper::Erase(ListItem* item) { - if (!item) { - return -1; - } - size_--; - ListItem* previous_item = item->prev_; - ListItem* next_item = item->next_; - if (!previous_item) { - if (next_item) { - next_item->prev_ = 0; - } - first_ = next_item; - } else { - previous_item->next_ = next_item; - } - if (!next_item) { - if (previous_item) { - previous_item->next_ = 0; - } - last_ = previous_item; - } else { - next_item->prev_ = previous_item; - } - delete item; - return 0; -} - -void ListWrapper::PushBackImpl(ListItem* item) { - if (Empty()) { - first_ = item; - last_ = item; - size_++; - return; - } - - item->prev_ = last_; - last_->next_ = item; - last_ = item; - size_++; -} - -void ListWrapper::PushFrontImpl(ListItem* item) { - if (Empty()) { - first_ = item; - last_ = item; - size_++; - return; - } - - item->next_ = first_; - first_->prev_ = item; - first_ = item; - size_++; -} - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/list_no_stl.h b/media/webrtc/trunk/webrtc/system_wrappers/source/list_no_stl.h deleted file mode 100644 index dcc9209e25c5..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/list_no_stl.h +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_ -#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_ - -#include "webrtc/system_wrappers/interface/constructor_magic.h" - -namespace webrtc { - -class CriticalSectionWrapper; - -class ListNoStlItem { - public: - ListNoStlItem(const void* ptr); - ListNoStlItem(const unsigned int item); - virtual ~ListNoStlItem(); - void* GetItem() const; - unsigned int GetUnsignedItem() const; - - protected: - ListNoStlItem* next_; - ListNoStlItem* prev_; - - private: - friend class ListNoStl; - - const void* item_ptr_; - const unsigned int item_; - DISALLOW_COPY_AND_ASSIGN(ListNoStlItem); -}; - -class ListNoStl { - public: - ListNoStl(); - virtual ~ListNoStl(); - - // ListWrapper functions - unsigned int GetSize() const; - int PushBack(const void* ptr); - int PushBack(const unsigned int item_id); - int PushFront(const void* ptr); - int PushFront(const unsigned int item_id); - int PopFront(); - int PopBack(); - bool Empty() const; - ListNoStlItem* First() const; - ListNoStlItem* Last() const; - ListNoStlItem* Next(ListNoStlItem* item) const; - ListNoStlItem* Previous(ListNoStlItem* item) const; - int Erase(ListNoStlItem* item); - int Insert(ListNoStlItem* existing_previous_item, - ListNoStlItem* new_item); - - int InsertBefore(ListNoStlItem* existing_next_item, - ListNoStlItem* new_item); - - private: - void PushBack(ListNoStlItem* item); - void PushFront(ListNoStlItem* item); - - CriticalSectionWrapper* critical_section_; - ListNoStlItem* first_; - ListNoStlItem* last_; - unsigned int size_; - DISALLOW_COPY_AND_ASSIGN(ListNoStl); -}; - -} // namespace webrtc - -#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/list_stl.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/list_stl.cc deleted file mode 100644 index 81b6f0cc8af0..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/list_stl.cc +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/system_wrappers/interface/list_wrapper.h" - -#include "webrtc/system_wrappers/interface/trace.h" - -namespace webrtc { - -ListItem::ListItem(const void* item) - : this_iter_(), - item_ptr_(item), - item_(0) { -} - -ListItem::ListItem(const unsigned int item) - : this_iter_(), - item_ptr_(0), - item_(item) { -} - -ListItem::~ListItem() { -} - -void* ListItem::GetItem() const { - return const_cast(item_ptr_); -} - -unsigned int ListItem::GetUnsignedItem() const { - return item_; -} - -ListWrapper::ListWrapper() - : list_() { -} - -ListWrapper::~ListWrapper() { - if (!Empty()) { - // TODO(hellner) I'm not sure this loggin is useful. - WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, - "Potential memory leak in ListWrapper"); - // Remove all remaining list items. 
- while (Erase(First()) == 0) {} - } -} - -bool ListWrapper::Empty() const { - return list_.empty(); -} - -unsigned int ListWrapper::GetSize() const { - return list_.size(); -} - -int ListWrapper::PushBack(const void* ptr) { - ListItem* item = new ListItem(ptr); - list_.push_back(item); - return 0; -} - -int ListWrapper::PushBack(const unsigned int item_id) { - ListItem* item = new ListItem(item_id); - list_.push_back(item); - return 0; -} - -int ListWrapper::PushFront(const unsigned int item_id) { - ListItem* item = new ListItem(item_id); - list_.push_front(item); - return 0; -} - -int ListWrapper::PushFront(const void* ptr) { - ListItem* item = new ListItem(ptr); - list_.push_front(item); - return 0; -} - -int ListWrapper::PopFront() { - if (list_.empty()) { - return -1; - } - list_.pop_front(); - return 0; -} - -int ListWrapper::PopBack() { - if (list_.empty()) { - return -1; - } - list_.pop_back(); - return 0; -} - -ListItem* ListWrapper::First() const { - if (list_.empty()) { - return NULL; - } - std::list::iterator item_iter = list_.begin(); - ListItem* return_item = (*item_iter); - return_item->this_iter_ = item_iter; - return return_item; -} - -ListItem* ListWrapper::Last() const { - if (list_.empty()) { - return NULL; - } - // std::list::end() addresses the last item + 1. Decrement so that the - // actual last is accessed. - std::list::iterator item_iter = list_.end(); - --item_iter; - ListItem* return_item = (*item_iter); - return_item->this_iter_ = item_iter; - return return_item; -} - -ListItem* ListWrapper::Next(ListItem* item) const { - if (item == NULL) { - return NULL; - } - std::list::iterator item_iter = item->this_iter_; - ++item_iter; - if (item_iter == list_.end()) { - return NULL; - } - ListItem* return_item = (*item_iter); - return_item->this_iter_ = item_iter; - return return_item; -} - -ListItem* ListWrapper::Previous(ListItem* item) const { - if (item == NULL) { - return NULL; - } - std::list::iterator item_iter = item->this_iter_; - if (item_iter == list_.begin()) { - return NULL; - } - --item_iter; - ListItem* return_item = (*item_iter); - return_item->this_iter_ = item_iter; - return return_item; -} - -int ListWrapper::Insert(ListItem* existing_previous_item, - ListItem* new_item) { - // Allow existing_previous_item to be NULL if the list is empty. - // TODO(hellner) why allow this? Keep it as is for now to avoid - // breaking API contract. - if (!existing_previous_item && !Empty()) { - return -1; - } - - if (!new_item) { - return -1; - } - - std::list::iterator insert_location = list_.begin(); - if (!Empty()) { - insert_location = existing_previous_item->this_iter_; - if (insert_location != list_.end()) { - ++insert_location; - } - } - - list_.insert(insert_location, new_item); - return 0; -} - -int ListWrapper::InsertBefore(ListItem* existing_next_item, - ListItem* new_item) { - // Allow existing_next_item to be NULL if the list is empty. - // Todo: why allow this? Keep it as is for now to avoid breaking API - // contract. 
- if (!existing_next_item && !Empty()) { - return -1; - } - if (!new_item) { - return -1; - } - - std::list::iterator insert_location = list_.begin(); - if (!Empty()) { - insert_location = existing_next_item->this_iter_; - } - - list_.insert(insert_location, new_item); - return 0; -} - -int ListWrapper::Erase(ListItem* item) { - if (item == NULL) { - return -1; - } - list_.erase(item->this_iter_); - return 0; -} - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/list_stl.h b/media/webrtc/trunk/webrtc/system_wrappers/source/list_stl.h deleted file mode 100644 index 29945304f363..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/list_stl.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_ -#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_ - -#include - -#include "webrtc/system_wrappers/interface/constructor_magic.h" - -namespace webrtc { - -class ListItem { - public: - ListItem(const void* ptr); - ListItem(const unsigned int item); - virtual ~ListItem(); - void* GetItem() const; - unsigned int GetUnsignedItem() const; - - private: - friend class ListWrapper; - mutable std::list::iterator this_iter_; - const void* item_ptr_; - const unsigned int item_; - DISALLOW_COPY_AND_ASSIGN(ListItem); -}; - -class ListWrapper { - public: - ListWrapper(); - ~ListWrapper(); - - // ListWrapper functions - unsigned int GetSize() const; - int PushBack(const void* ptr); - int PushBack(const unsigned int item_id); - int PushFront(const void* ptr); - int PushFront(const unsigned int item_id); - int PopFront(); - int PopBack(); - bool Empty() const; - ListItem* First() const; - ListItem* Last() const; - ListItem* Next(ListItem* item) const; - ListItem* Previous(ListItem* item) const; - int Erase(ListItem* item); - int Insert(ListItem* existing_previous_item, ListItem* new_item); - int InsertBefore(ListItem* existing_next_item, ListItem* new_item); - - private: - mutable std::list list_; - DISALLOW_COPY_AND_ASSIGN(ListWrapper); -}; - -} // namespace webrtc - -#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/list_unittest.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/list_unittest.cc deleted file mode 100644 index 1e4f922a6b07..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/list_unittest.cc +++ /dev/null @@ -1,475 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "webrtc/system_wrappers/interface/list_wrapper.h" - -#include "testing/gtest/include/gtest/gtest.h" -#include "webrtc/system_wrappers/interface/scoped_ptr.h" - -using ::webrtc::ListWrapper; -using ::webrtc::ListItem; -using ::webrtc::scoped_ptr; - -// Note: kNumberOfElements needs to be even. 
-const unsigned int kNumberOfElements = 10; - -// An opaque implementation of dynamic or statically allocated unsigned ints. -// This class makes it possible to use the exact same code for testing of both -// the dynamic and static implementation of ListWrapper. -// Clarification: ListWrapper has two versions of PushBack(..). It takes an -// unsigned integer or a void pointer. The integer implementation takes care -// of memory management. The void pointer version expect the caller to manage -// the memory associated with the void pointer. -// This class works like the integer version but can be implemented on top of -// either the integer version or void pointer version of ListWrapper. -// Note: the non-virtual fuctions behave the same for both versions. -class ListWrapperSimple { -public: - static ListWrapperSimple* Create(bool static_allocation); - virtual ~ListWrapperSimple() {} - - // These three functions should be used for manipulating ListItems so that - // they are the type corresponding to the underlying implementation. - virtual unsigned int GetUnsignedItem( - const ListItem* item) const = 0; - virtual ListItem* CreateListItem(unsigned int item_id) = 0; - unsigned int GetSize() const { - return list_.GetSize(); - } - virtual int PushBack(const unsigned int item_id) = 0; - virtual int PushFront(const unsigned int item_id) = 0; - virtual int PopFront() = 0; - virtual int PopBack() = 0; - bool Empty() const { - return list_.Empty(); - } - ListItem* First() const { - return list_.First(); - } - ListItem* Last() const { - return list_.Last(); - } - ListItem* Next(ListItem* item) const { - return list_.Next(item); - } - ListItem* Previous(ListItem* item) const { - return list_.Previous(item); - } - virtual int Erase(ListItem* item) = 0; - int Insert(ListItem* existing_previous_item, - ListItem* new_item) { - const int retval = list_.Insert(existing_previous_item, new_item); - if (retval != 0) { - EXPECT_TRUE(DestroyListItem(new_item)); - } - return retval; - } - - int InsertBefore(ListItem* existing_next_item, - ListItem* new_item) { - const int retval = list_.InsertBefore(existing_next_item, new_item); - if (retval != 0) { - EXPECT_TRUE(DestroyListItem(new_item)); - } - return retval; - } -protected: - ListWrapperSimple() {} - - virtual bool DestroyListItemContent(ListItem* item) = 0; - bool DestroyListItem(ListItem* item) { - const bool retval = DestroyListItemContent(item); - delete item; - return retval; - } - - ListWrapper list_; -}; - -void ClearList(ListWrapperSimple* list_wrapper) { - if (list_wrapper == NULL) { - return; - } - ListItem* list_item = list_wrapper->First(); - while (list_item != NULL) { - EXPECT_EQ(list_wrapper->Erase(list_item), 0); - list_item = list_wrapper->First(); - } -} - -class ListWrapperStatic : public ListWrapperSimple { -public: - ListWrapperStatic() {} - virtual ~ListWrapperStatic() { - ClearList(this); - } - - virtual unsigned int GetUnsignedItem(const ListItem* item) const { - return item->GetUnsignedItem(); - } - virtual ListItem* CreateListItem(unsigned int item_id) { - return new ListItem(item_id); - } - virtual bool DestroyListItemContent(ListItem* item) { - return true; - } - virtual int PushBack(const unsigned int item_id) { - return list_.PushBack(item_id); - } - virtual int PushFront(const unsigned int item_id) { - return list_.PushFront(item_id); - } - virtual int PopFront() { - return list_.PopFront(); - } - virtual int PopBack() { - return list_.PopBack(); - } - virtual int Erase(ListItem* item) { - return list_.Erase(item); - } -}; - 
-class ListWrapperDynamic : public ListWrapperSimple { -public: - ListWrapperDynamic() {} - virtual ~ListWrapperDynamic() { - ClearList(this); - } - - virtual unsigned int GetUnsignedItem(const ListItem* item) const { - const unsigned int* return_value_pointer = - reinterpret_cast(item->GetItem()); - if (return_value_pointer == NULL) { - return -1; - } - return *return_value_pointer; - } - virtual ListItem* CreateListItem(unsigned int item_id) { - unsigned int* item_id_pointer = new unsigned int; - if (item_id_pointer == NULL) { - return NULL; - } - *item_id_pointer = item_id; - ListItem* return_value = new ListItem( - reinterpret_cast(item_id_pointer)); - if (return_value == NULL) { - delete item_id_pointer; - return NULL; - } - return return_value; - } - virtual bool DestroyListItemContent(ListItem* item) { - if (item == NULL) { - return false; - } - bool return_value = false; - unsigned int* item_id_ptr = reinterpret_cast( - item->GetItem()); - if (item_id_ptr != NULL) { - return_value = true; - delete item_id_ptr; - } - return return_value; - } - virtual int PushBack(const unsigned int item_id) { - unsigned int* item_id_ptr = new unsigned int; - if (item_id_ptr == NULL) { - return -1; - } - *item_id_ptr = item_id; - const int return_value = list_.PushBack( - reinterpret_cast(item_id_ptr)); - if (return_value != 0) { - delete item_id_ptr; - } - return return_value; - } - virtual int PushFront(const unsigned int item_id) { - unsigned int* item_id_ptr = new unsigned int; - if (item_id_ptr == NULL) { - return -1; - } - *item_id_ptr = item_id; - const int return_value = list_.PushFront( - reinterpret_cast(item_id_ptr)); - if (return_value != 0) { - delete item_id_ptr; - } - return return_value; - } - virtual int PopFront() { - return Erase(list_.First()); - } - virtual int PopBack() { - return Erase(list_.Last()); - } - virtual int Erase(ListItem* item) { - if (item == NULL) { - return -1; - } - int retval = 0; - if (!DestroyListItemContent(item)) { - retval = -1; - ADD_FAILURE(); - } - if (list_.Erase(item) != 0) { - retval = -1; - } - return retval; - } -}; - -ListWrapperSimple* ListWrapperSimple::Create(bool static_allocation) { - if (static_allocation) { - return new ListWrapperStatic(); - } - return new ListWrapperDynamic(); -} - -ListWrapperSimple* CreateAscendingList(bool static_allocation) { - ListWrapperSimple* return_value = ListWrapperSimple::Create( - static_allocation); - if (return_value == NULL) { - return NULL; - } - for (unsigned int i = 0; i < kNumberOfElements; ++i) { - if (return_value->PushBack(i) == -1) { - ClearList(return_value); - delete return_value; - return NULL; - } - } - return return_value; -} - -ListWrapperSimple* CreateDescendingList(bool static_allocation) { - ListWrapperSimple* return_value = ListWrapperSimple::Create( - static_allocation); - if (return_value == NULL) { - return NULL; - } - for (unsigned int i = 0; i < kNumberOfElements; ++i) { - if (return_value->PushBack(kNumberOfElements - i - 1) == -1) { - ClearList(return_value); - delete return_value; - return NULL; - } - } - return return_value; -} - -// [0,kNumberOfElements - 1,1,kNumberOfElements - 2,...] 
(this is why -// kNumberOfElements need to be even) -ListWrapperSimple* CreateInterleavedList(bool static_allocation) { - ListWrapperSimple* return_value = ListWrapperSimple::Create( - static_allocation); - if (return_value == NULL) { - return NULL; - } - unsigned int uneven_count = 0; - unsigned int even_count = 0; - for (unsigned int i = 0; i < kNumberOfElements; i++) { - unsigned int push_value = 0; - if ((i % 2) == 0) { - push_value = even_count; - even_count++; - } else { - push_value = kNumberOfElements - uneven_count - 1; - uneven_count++; - } - if (return_value->PushBack(push_value) == -1) { - ClearList(return_value); - delete return_value; - return NULL; - } - } - return return_value; -} - -void PrintList(const ListWrapperSimple* list) { - ListItem* list_item = list->First(); - printf("["); - while (list_item != NULL) { - printf("%3u", list->GetUnsignedItem(list_item)); - list_item = list->Next(list_item); - } - printf("]\n"); -} - -bool CompareLists(const ListWrapperSimple* lhs, const ListWrapperSimple* rhs) { - const unsigned int list_size = lhs->GetSize(); - if (lhs->GetSize() != rhs->GetSize()) { - return false; - } - if (lhs->Empty()) { - return rhs->Empty(); - } - unsigned int i = 0; - ListItem* lhs_item = lhs->First(); - ListItem* rhs_item = rhs->First(); - while (i < list_size) { - if (lhs_item == NULL) { - return false; - } - if (rhs_item == NULL) { - return false; - } - if (lhs->GetUnsignedItem(lhs_item) != rhs->GetUnsignedItem(rhs_item)) { - return false; - } - i++; - lhs_item = lhs->Next(lhs_item); - rhs_item = rhs->Next(rhs_item); - } - return true; -} - -TEST(ListWrapperTest, ReverseNewIntList) { - // Create a new temporary list with elements reversed those of - // new_int_list_ - const scoped_ptr descending_list( - CreateDescendingList(rand() % 2)); - ASSERT_FALSE(descending_list.get() == NULL); - ASSERT_FALSE(descending_list->Empty()); - ASSERT_EQ(kNumberOfElements, descending_list->GetSize()); - - const scoped_ptr ascending_list( - CreateAscendingList(rand() % 2)); - ASSERT_FALSE(ascending_list.get() == NULL); - ASSERT_FALSE(ascending_list->Empty()); - ASSERT_EQ(kNumberOfElements, ascending_list->GetSize()); - - scoped_ptr list_to_reverse( - ListWrapperSimple::Create(rand() % 2)); - - // Reverse the list using PushBack and Previous. - for (ListItem* item = ascending_list->Last(); item != NULL; - item = ascending_list->Previous(item)) { - list_to_reverse->PushBack(ascending_list->GetUnsignedItem(item)); - } - - ASSERT_TRUE(CompareLists(descending_list.get(), list_to_reverse.get())); - - scoped_ptr list_to_un_reverse( - ListWrapperSimple::Create(rand() % 2)); - ASSERT_FALSE(list_to_un_reverse.get() == NULL); - // Reverse the reversed list using PushFront and Next. - for (ListItem* item = list_to_reverse->First(); item != NULL; - item = list_to_reverse->Next(item)) { - list_to_un_reverse->PushFront(list_to_reverse->GetUnsignedItem(item)); - } - ASSERT_TRUE(CompareLists(ascending_list.get(), list_to_un_reverse.get())); -} - -TEST(ListWrapperTest, PopTest) { - scoped_ptr ascending_list(CreateAscendingList(rand() % 2)); - ASSERT_FALSE(ascending_list.get() == NULL); - ASSERT_FALSE(ascending_list->Empty()); - EXPECT_EQ(0, ascending_list->PopFront()); - EXPECT_EQ(1U, ascending_list->GetUnsignedItem(ascending_list->First())); - - EXPECT_EQ(0, ascending_list->PopBack()); - EXPECT_EQ(kNumberOfElements - 2, ascending_list->GetUnsignedItem( - ascending_list->Last())); - EXPECT_EQ(kNumberOfElements - 2, ascending_list->GetSize()); -} - -// Use Insert to interleave two lists. 
-TEST(ListWrapperTest, InterLeaveTest) { - scoped_ptr interleave_list( - CreateAscendingList(rand() % 2)); - ASSERT_FALSE(interleave_list.get() == NULL); - ASSERT_FALSE(interleave_list->Empty()); - - scoped_ptr descending_list( - CreateDescendingList(rand() % 2)); - ASSERT_FALSE(descending_list.get() == NULL); - - for (unsigned int i = 0; i < kNumberOfElements / 2; ++i) { - ASSERT_EQ(0, interleave_list->PopBack()); - ASSERT_EQ(0, descending_list->PopBack()); - } - ASSERT_EQ(kNumberOfElements / 2, interleave_list->GetSize()); - ASSERT_EQ(kNumberOfElements / 2, descending_list->GetSize()); - - unsigned int insert_position = kNumberOfElements / 2; - ASSERT_EQ(insert_position * 2, kNumberOfElements); - while (!descending_list->Empty()) { - ListItem* item = descending_list->Last(); - ASSERT_FALSE(item == NULL); - - const unsigned int item_id = descending_list->GetUnsignedItem(item); - ASSERT_EQ(0, descending_list->Erase(item)); - - ListItem* insert_item = interleave_list->CreateListItem(item_id); - ASSERT_FALSE(insert_item == NULL); - item = interleave_list->First(); - ASSERT_FALSE(item == NULL); - for (unsigned int j = 0; j < insert_position - 1; ++j) { - item = interleave_list->Next(item); - ASSERT_FALSE(item == NULL); - } - EXPECT_EQ(0, interleave_list->Insert(item, insert_item)); - --insert_position; - } - - scoped_ptr interleaved_list( - CreateInterleavedList(rand() % 2)); - ASSERT_FALSE(interleaved_list.get() == NULL); - ASSERT_FALSE(interleaved_list->Empty()); - ASSERT_TRUE(CompareLists(interleaved_list.get(), interleave_list.get())); -} - -// Use InsertBefore to interleave two lists. -TEST(ListWrapperTest, InterLeaveTestII) { - scoped_ptr interleave_list( - CreateDescendingList(rand() % 2)); - ASSERT_FALSE(interleave_list.get() == NULL); - ASSERT_FALSE(interleave_list->Empty()); - - scoped_ptr ascending_list(CreateAscendingList(rand() % 2)); - ASSERT_FALSE(ascending_list.get() == NULL); - - for (unsigned int i = 0; i < kNumberOfElements / 2; ++i) { - ASSERT_EQ(0, interleave_list->PopBack()); - ASSERT_EQ(0, ascending_list->PopBack()); - } - ASSERT_EQ(kNumberOfElements / 2, interleave_list->GetSize()); - ASSERT_EQ(kNumberOfElements / 2, ascending_list->GetSize()); - - unsigned int insert_position = kNumberOfElements / 2; - ASSERT_EQ(insert_position * 2, kNumberOfElements); - while (!ascending_list->Empty()) { - ListItem* item = ascending_list->Last(); - ASSERT_FALSE(item == NULL); - - const unsigned int item_id = ascending_list->GetUnsignedItem(item); - ASSERT_EQ(0, ascending_list->Erase(item)); - - ListItem* insert_item = interleave_list->CreateListItem(item_id); - ASSERT_FALSE(insert_item == NULL); - item = interleave_list->First(); - ASSERT_FALSE(item == NULL); - for (unsigned int j = 0; j < insert_position - 1; ++j) { - item = interleave_list->Next(item); - ASSERT_FALSE(item == NULL); - } - EXPECT_EQ(interleave_list->InsertBefore(item, insert_item), 0); - --insert_position; - } - - scoped_ptr interleaved_list( - CreateInterleavedList(rand() % 2)); - ASSERT_FALSE(interleaved_list.get() == NULL); - ASSERT_FALSE(interleaved_list->Empty()); - - ASSERT_TRUE(CompareLists(interleaved_list.get(), interleave_list.get())); -} diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/logcat_trace_context.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/logcat_trace_context.cc new file mode 100644 index 000000000000..313acc74e8fa --- /dev/null +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/logcat_trace_context.cc @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2013 The WebRTC project 
authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
+
+#include <android/log.h>
+#include <assert.h>
+
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+static android_LogPriority AndroidLogPriorityFromWebRtcLogLevel(
+    TraceLevel webrtc_level) {
+  // NOTE: this mapping is somewhat arbitrary.  StateInfo and Info are mapped
+  // to DEBUG because they are highly verbose in webrtc code (which is
+  // unfortunate).
+  switch (webrtc_level) {
+    case webrtc::kTraceStateInfo: return ANDROID_LOG_DEBUG;
+    case webrtc::kTraceWarning: return ANDROID_LOG_WARN;
+    case webrtc::kTraceError: return ANDROID_LOG_ERROR;
+    case webrtc::kTraceCritical: return ANDROID_LOG_FATAL;
+    case webrtc::kTraceApiCall: return ANDROID_LOG_VERBOSE;
+    case webrtc::kTraceModuleCall: return ANDROID_LOG_VERBOSE;
+    case webrtc::kTraceMemory: return ANDROID_LOG_VERBOSE;
+    case webrtc::kTraceTimer: return ANDROID_LOG_VERBOSE;
+    case webrtc::kTraceStream: return ANDROID_LOG_VERBOSE;
+    case webrtc::kTraceDebug: return ANDROID_LOG_DEBUG;
+    case webrtc::kTraceInfo: return ANDROID_LOG_DEBUG;
+    case webrtc::kTraceTerseInfo: return ANDROID_LOG_INFO;
+    default:
+      LOG(LS_ERROR) << "Unexpected log level" << webrtc_level;
+      return ANDROID_LOG_FATAL;
+  }
+}
+
+LogcatTraceContext::LogcatTraceContext() {
+  webrtc::Trace::CreateTrace();
+  if (webrtc::Trace::SetTraceCallback(this) != 0)
+    assert(false);
+}
+
+LogcatTraceContext::~LogcatTraceContext() {
+  if (webrtc::Trace::SetTraceCallback(NULL) != 0)
+    assert(false);
+  webrtc::Trace::ReturnTrace();
+}
+
+void LogcatTraceContext::Print(TraceLevel level,
+                               const char* message,
+                               int length) {
+  __android_log_print(AndroidLogPriorityFromWebRtcLogLevel(level),
+                      "WEBRTC", "%.*s", length, message);
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/logging.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/logging.cc
index 7a33749610ee..922a27237172 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/logging.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/logging.cc
@@ -55,7 +55,7 @@ bool LogMessage::Loggable(LoggingSeverity sev) {
 
 LogMessage::~LogMessage() {
   const std::string& str = print_stream_.str();
-  WEBRTC_TRACE(WebRtcSeverity(severity_), kTraceUndefined, 0, str.c_str());
+  Trace::Add(WebRtcSeverity(severity_), kTraceUndefined, 0, str.c_str());
 }
 
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/logging_no_op.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/logging_no_op.cc
deleted file mode 100644
index be0f799bde32..000000000000
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/logging_no_op.cc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */ - -#include "webrtc/system_wrappers/interface/logging.h" - -namespace webrtc { - -LogMessage::LogMessage(const char*, int, LoggingSeverity) { - // Avoid an unused-private-field warning. - (void)severity_; -} - -LogMessage::~LogMessage() { -} - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/move.h b/media/webrtc/trunk/webrtc/system_wrappers/source/move.h new file mode 100644 index 000000000000..53109c73ad6e --- /dev/null +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/move.h @@ -0,0 +1,215 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Borrowed from Chromium's src/base/move.h. + +#ifndef WEBRTC_SYSTEM_WRAPPERS_INTEFACE_MOVE_H_ +#define WEBRTC_SYSTEM_WRAPPERS_INTEFACE_MOVE_H_ + +// Macro with the boilerplate that makes a type move-only in C++03. +// +// USAGE +// +// This macro should be used instead of DISALLOW_COPY_AND_ASSIGN to create +// a "move-only" type. Unlike DISALLOW_COPY_AND_ASSIGN, this macro should be +// the first line in a class declaration. +// +// A class using this macro must call .Pass() (or somehow be an r-value already) +// before it can be: +// +// * Passed as a function argument +// * Used as the right-hand side of an assignment +// * Returned from a function +// +// Each class will still need to define their own "move constructor" and "move +// operator=" to make this useful. Here's an example of the macro, the move +// constructor, and the move operator= from the scoped_ptr class: +// +// template +// class scoped_ptr { +// MOVE_ONLY_TYPE_FOR_CPP_03(scoped_ptr, RValue) +// public: +// scoped_ptr(RValue& other) : ptr_(other.release()) { } +// scoped_ptr& operator=(RValue& other) { +// swap(other); +// return *this; +// } +// }; +// +// Note that the constructor must NOT be marked explicit. +// +// For consistency, the second parameter to the macro should always be RValue +// unless you have a strong reason to do otherwise. It is only exposed as a +// macro parameter so that the move constructor and move operator= don't look +// like they're using a phantom type. +// +// +// HOW THIS WORKS +// +// For a thorough explanation of this technique, see: +// +// http://en.wikibooks.org/wiki/More_C%2B%2B_Idioms/Move_Constructor +// +// The summary is that we take advantage of 2 properties: +// +// 1) non-const references will not bind to r-values. +// 2) C++ can apply one user-defined conversion when initializing a +// variable. +// +// The first lets us disable the copy constructor and assignment operator +// by declaring private version of them with a non-const reference parameter. +// +// For l-values, direct initialization still fails like in +// DISALLOW_COPY_AND_ASSIGN because the copy constructor and assignment +// operators are private. +// +// For r-values, the situation is different. The copy constructor and +// assignment operator are not viable due to (1), so we are trying to call +// a non-existent constructor and non-existing operator= rather than a private +// one. Since we have not committed an error quite yet, we can provide an +// alternate conversion sequence and a constructor. 
+//
+//   * a private struct named "RValue"
+//   * a user-defined conversion "operator RValue()"
+//   * a "move constructor" and "move operator=" that take the RValue& as
+//     their sole parameter.
+//
+// Only r-values will trigger this sequence and execute our "move constructor"
+// or "move operator=." L-values will match the private copy constructor and
+// operator= first giving a "private in this context" error. This combination
+// gives us a move-only type.
+//
+// For signaling a destructive transfer of data from an l-value, we provide a
+// method named Pass() which creates an r-value for the current instance
+// triggering the move constructor or move operator=.
+//
+// Other ways to get r-values are to use the result of an expression like a
+// function call.
+//
+// Here's an example with comments explaining what gets triggered where:
+//
+//    class Foo {
+//      MOVE_ONLY_TYPE_FOR_CPP_03(Foo, RValue);
+//
+//     public:
+//       ... API ...
+//       Foo(RValue other);           // Move constructor.
+//       Foo& operator=(RValue rhs);  // Move operator=
+//    };
+//
+//    Foo MakeFoo();  // Function that returns a Foo.
+//
+//    Foo f;
+//    Foo f_copy(f);  // ERROR: Foo(Foo&) is private in this context.
+//    Foo f_assign;
+//    f_assign = f;   // ERROR: operator=(Foo&) is private in this context.
+//
+//
+//    Foo f(MakeFoo());      // R-value so alternate conversion executed.
+//    Foo f_copy(f.Pass());  // R-value so alternate conversion executed.
+//    f = f_copy.Pass();     // R-value so alternate conversion executed.
+//
+//
+// IMPLEMENTATION SUBTLETIES WITH RValue
+//
+// The RValue struct is just a container for a pointer back to the original
+// object. It should only ever be created as a temporary, and no external
+// class should ever declare it or use it in a parameter.
+//
+// It is tempting to want to use the RValue type in function parameters, but
+// excluding the limited usage here for the move constructor and move
+// operator=, doing so would mean that the function could take both r-values
+// and l-values equally which is unexpected. See COMPARED TO Boost.Move for
+// more details.
+//
+// An alternate, and incorrect, implementation of the RValue class used by
+// Boost.Move makes RValue a fieldless child of the move-only type. RValue&
+// is then used in place of RValue in the various operators. The RValue& is
+// "created" by doing *reinterpret_cast<RValue*>(this). This has the appeal
+// of never creating a temporary RValue struct even with optimizations
+// disabled. Also, by virtue of inheritance you can treat the RValue
+// reference as if it were the move-only type itself. Unfortunately,
+// using the result of this reinterpret_cast<> is actually undefined behavior
+// due to C++98 5.2.10.7. In certain compilers (e.g., NaCl) the optimizer
+// will generate non-working code.
+//
+// In optimized builds, both implementations generate the same assembly so we
+// choose the one that adheres to the standard.
+//
+//
+// COMPARED TO C++11
+//
+// In C++11, you would implement this functionality using an r-value reference
+// and our .Pass() method would be replaced with a call to std::move().
+//
+// This emulation also has a deficiency where it uses up the single
+// user-defined conversion allowed by C++ during initialization. This can
+// cause problems in some API edge cases. For instance, in scoped_ptr, it is
+// impossible to make a function "void Foo(scoped_ptr<Foo> p)" accept a
+// value of type scoped_ptr<Bar> even if you add a constructor to
+// scoped_ptr<> that would make it look like it should work.
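// Reviewer aside, not part of the patch: a minimal move-only type built with
// the WEBRTC_MOVE_ONLY_TYPE_FOR_CPP_03 macro defined at the bottom of this
// header, to make the machinery described above concrete. The class itself
// is an illustrative sketch:

#include "webrtc/system_wrappers/source/move.h"

// C++03 move-only handle: copy-construction from an l-value fails to
// compile, while Pass() transfers ownership through the private RValue.
class MoveOnlyBuffer {
  WEBRTC_MOVE_ONLY_TYPE_FOR_CPP_03(MoveOnlyBuffer, RValue)
 public:
  explicit MoveOnlyBuffer(int* data) : data_(data) {}
  ~MoveOnlyBuffer() { delete data_; }
  // The "move constructor" and "move operator=" the comment prescribes.
  MoveOnlyBuffer(RValue other) : data_(other.object->release()) {}
  MoveOnlyBuffer& operator=(RValue other) {
    int* incoming = other.object->release();  // Take first: self-move safe.
    delete data_;
    data_ = incoming;
    return *this;
  }
  int* release() {
    int* tmp = data_;
    data_ = 0;
    return tmp;
  }
 private:
  int* data_;
};

// Usage:
//   MoveOnlyBuffer a(new int(1));
//   MoveOnlyBuffer b(a);         // ERROR: copy constructor is private.
//   MoveOnlyBuffer c(a.Pass());  // OK: |c| now owns the int; |a| is empty.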
+// C++11 does not have this deficiency.
+//
+//
+// COMPARED TO Boost.Move
+//
+// Our implementation is similar to Boost.Move, but we keep the RValue struct
+// private to the move-only type, and we don't use the reinterpret_cast<> hack.
+//
+// In Boost.Move, RValue is the boost::rv<> template. This type can be used
+// when writing APIs like:
+//
+//   void MyFunc(boost::rv<Foo>& f)
+//
+// that can take advantage of rv<> to avoid extra copies of a type. However
+// you would still be able to call this version of MyFunc with an l-value:
+//
+//   Foo f;
+//   MyFunc(f);  // Uh oh, we probably just destroyed |f| w/o calling Pass().
+//
+// unless someone is very careful to also declare a parallel override like:
+//
+//   void MyFunc(const Foo& f)
+//
+// that would catch the l-values first. This was declared unsafe in C++11 and
+// a C++11 compiler will explicitly fail MyFunc(f). Unfortunately, we cannot
+// ensure this in C++03.
+//
+// Since we have no need for writing such APIs yet, our implementation keeps
+// RValue private and uses a .Pass() method to do the conversion instead of
+// trying to write a version of "std::move()." Writing an API like std::move()
+// would require the RValue struct to be public.
+//
+//
+// CAVEATS
+//
+// If you include a move-only type as a field inside a class that does not
+// explicitly declare a copy constructor, the containing class's implicit
+// copy constructor will change from Containing(const Containing&) to
+// Containing(Containing&). This can cause some unexpected errors.
+//
+//   http://llvm.org/bugs/show_bug.cgi?id=11528
+//
+// The workaround is to explicitly declare your copy constructor.
+//
+#define WEBRTC_MOVE_ONLY_TYPE_FOR_CPP_03(type, rvalue_type) \
+ private: \
+  struct rvalue_type { \
+    explicit rvalue_type(type* object) : object(object) {} \
+    type* object; \
+  }; \
+  type(type&); \
+  void operator=(type&); \
+ public: \
+  operator rvalue_type() { return rvalue_type(this); } \
+  type Pass() { return type(rvalue_type(this)); } \
+ private:
+
+#endif  // WEBRTC_SYSTEM_WRAPPERS_INTEFACE_MOVE_H_
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp
index 132795d74cbf..1236ba282ac2 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp
@@ -37,7 +37,7 @@
     '../interface/event_wrapper.h',
     '../interface/file_wrapper.h',
     '../interface/fix_interlocked_exchange_pointer_win.h',
-    '../interface/list_wrapper.h',
+    '../interface/logcat_trace_context.h',
     '../interface/logging.h',
     '../interface/ref_count.h',
     '../interface/rw_lock_wrapper.h',
@@ -47,6 +47,7 @@
     '../interface/sort.h',
     '../interface/static_instance.h',
    '../interface/stringize_macros.h',
+    '../interface/thread_annotations.h',
     '../interface/thread_wrapper.h',
     '../interface/tick_util.h',
     '../interface/trace.h',
@@ -81,9 +82,8 @@
     'event_win.h',
     'file_impl.cc',
     'file_impl.h',
-    'list_no_stl.cc',
+    'logcat_trace_context.cc',
     'logging.cc',
-    'logging_no_op.cc',
     'rw_lock.cc',
     'rw_lock_generic.cc',
     'rw_lock_generic.h',
@@ -102,7 +102,6 @@
     'thread_win.h',
     'trace_impl.cc',
     'trace_impl.h',
-    'trace_impl_no_op.cc',
     'trace_posix.cc',
     'trace_posix.h',
     'trace_win.cc',
@@ -114,22 +113,6 @@
     }, {
       'sources!': [ 'data_log.cc', ],
     },],
-    ['enable_tracing==1', {
-      'sources!': [
-        'logging_no_op.cc',
-        'trace_impl_no_op.cc',
-      ],
-    }, {
-      'sources!': [
-        'logging.cc',
-        'trace_impl.cc',
-        'trace_impl.h',
-        'trace_posix.cc',
-
'trace_posix.h', - 'trace_win.cc', - 'trace_win.h', - ], - }], ['enable_lazy_trace_alloc==1', { 'defines': [ 'WEBRTC_LAZY_TRACE_ALLOC', @@ -150,6 +133,16 @@ 'rw_lock_posix.cc', 'rw_lock_posix.h', ], + 'link_settings': { + 'libraries': [ + '-llog', + ], + }, + }, { # OS!="android" + 'sources!': [ + '../interface/logcat_trace_context.h', + 'logcat_trace_context.cc', + ], }], ['OS=="linux"', { 'defines': [ @@ -207,15 +200,9 @@ ['OS=="android" or moz_widget_toolkit_gonk==1', { 'targets': [ { - 'variables': { - # Treat this as third-party code. - 'chromium_code': 0, - }, 'target_name': 'cpu_features_android', 'type': 'static_library', 'sources': [ - # TODO(leozwang): Ideally we want to audomatically exclude .c files - # as with .cc files, gyp currently only excludes .cc files. 'cpu_features_android.c', ], 'conditions': [ @@ -231,15 +218,14 @@ ], }], ], - }, { - 'sources': [ - 'android/cpu-features.c', - 'android/cpu-features.h', - ], - }], - ], - }, - ], + }, { + 'sources': [ + 'droid-cpu-features.c', + 'droid-cpu-features.h', + ], + }], + ], + }], }], ], # conditions } diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_tests.gyp b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_tests.gyp index 36c47d23bce2..06737d23c2e2 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_tests.gyp +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_tests.gyp @@ -18,13 +18,11 @@ '<(webrtc_root)/test/test.gyp:test_support_main', ], 'sources': [ - '../../common_unittest.cc', 'aligned_malloc_unittest.cc', 'clock_unittest.cc', 'condition_variable_unittest.cc', 'critical_section_unittest.cc', 'event_tracer_unittest.cc', - 'list_unittest.cc', 'logging_unittest.cc', 'data_log_unittest.cc', 'data_log_unittest_disabled.cc', @@ -79,10 +77,10 @@ 'target_name': 'system_wrappers_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'system_wrappers_unittests', ], 'includes': [ + '../../build/isolate.gypi', 'system_wrappers_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_unittests.isolate b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_unittests.isolate index c13ad8865640..6ec5e7c6420c 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_unittests.isolate +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_unittests.isolate @@ -13,8 +13,8 @@ # of a Chromium checkout, this is one level above the standalone build. 
'variables': { 'isolate_dependency_untracked': [ - '../../../../resources/', '../../../../data/', + '../../../../resources/', ], }, }], @@ -22,15 +22,15 @@ 'variables': { 'command': [ '../../../testing/test_env.py', - '../../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/system_wrappers_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../../testing/test_env.py', - '../../../tools/swarm_client/run_isolated.py', - '../../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/system_wrappers_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/thread_unittest.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/thread_unittest.cc index d37f45b5ebdb..25095e6599ba 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/thread_unittest.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/thread_unittest.cc @@ -20,10 +20,6 @@ bool NullRunFunction(void* obj) { return true; } -// Disable for TSan v2, see -// https://code.google.com/p/webrtc/issues/detail?id=2259 for details. -#if !defined(THREAD_SANITIZER) - TEST(ThreadTest, StartStop) { ThreadWrapper* thread = ThreadWrapper::CreateThread(&NullRunFunction, NULL); unsigned int id = 42; @@ -54,6 +50,4 @@ TEST(ThreadTest, RunFunctionIsCalled) { delete thread; } -#endif // if !defined(THREAD_SANITIZER) - } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc index 4399bcd27dcc..670a4875d8f2 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc @@ -275,6 +275,10 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message, sprintf(trace_message, " VIDEO PROC:%5ld %5ld;", id_engine, id_channel); break; + case kTraceRemoteBitrateEstimator: + sprintf(trace_message, " BWE RBE:%5ld %5ld;", id_engine, + id_channel); + break; } } else { switch (module) { @@ -334,6 +338,9 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message, case kTraceVideoPreocessing: sprintf(trace_message, " VIDEO PROC:%11ld;", idl); break; + case kTraceRemoteBitrateEstimator: + sprintf(trace_message, " BWE RBE:%11ld;", idl); + break; } } return kMessageLength; @@ -460,8 +467,8 @@ void TraceImpl::AddMessageToList( // the message in the buffer. Use the indexing as this minimizes // cache misses/etc if (!message_queue_[active_queue_][idx]) { - return; - } + return; +} #endif if (idx >= WEBRTC_TRACE_MAX_QUEUE) { diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl_no_op.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl_no_op.cc deleted file mode 100644 index fe57859f0c7a..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl_no_op.cc +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "webrtc/system_wrappers/interface/trace.h" - -namespace webrtc { - -const int Trace::kBoilerplateLength = 71; -const int Trace::kTimestampPosition = 13; -const int Trace::kTimestampLength = 12; -uint32_t Trace::level_filter_ = kTraceNone; - -void Trace::CreateTrace() { -} - -void Trace::ReturnTrace() { -} - -int32_t Trace::TraceFile(char file_name[1024]) { - return -1; -} - -int32_t Trace::SetTraceFile(const char* file_name, - const bool add_file_counter) { - return -1; -} - -int32_t Trace::SetTraceCallback(TraceCallback* callback) { - return -1; -} - -void Trace::Add(const TraceLevel level, const TraceModule module, - const int32_t id, const char* msg, ...) { -} - -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc index bfce2af8f6ba..360f7c7f4777 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc @@ -17,10 +17,6 @@ #include #include -#ifdef WEBRTC_ANDROID -#include -#endif - #if defined(_DEBUG) #define BUILDMODE "d" #elif defined(DEBUG) diff --git a/media/webrtc/trunk/webrtc/system_wrappers/test/list/list.cc b/media/webrtc/trunk/webrtc/system_wrappers/test/list/list.cc deleted file mode 100644 index 35ddf335859f..000000000000 --- a/media/webrtc/trunk/webrtc/system_wrappers/test/list/list.cc +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include "webrtc/system_wrappers/interface/list_wrapper.h" - -const int kNumberOfElements = 10; - -void FailTest(bool failed) -{ - if (failed) - { - printf("Test failed!\n"); - printf("Press enter to continue:"); - getchar(); - exit(0); - } -} - -int GetStoredIntegerValue(ListItem* list_item) -{ - void* list_item_pointer = list_item->GetItem(); - if (list_item_pointer != NULL) - { - return *(reinterpret_cast(list_item_pointer)); - } - return static_cast(list_item->GetUnsignedItem()); -} - -void PrintList(ListWrapper& list) -{ - ListItem* list_item = list.First(); - printf("List: "); - while (list_item != NULL) - { - int item_value = GetStoredIntegerValue(list_item); - FailTest(item_value < 0); - printf(" %d",item_value); - list_item = list.Next(list_item); - } - printf("\n"); -} - -// The list should always be in ascending order -void ListSanity(ListWrapper& list) -{ - if(list.Empty()) - { - return; - } - ListItem* item_iter = list.First(); - // Fake a previous value for the first iteration - int previous_value = GetStoredIntegerValue(item_iter) - 1; - while (item_iter != NULL) - { - const int value = GetStoredIntegerValue(item_iter); - FailTest(value != previous_value + 1); - previous_value = value; - item_iter = list.Next(item_iter); - } -} - -int main(int /*argc*/, char* /*argv*/[]) -{ - printf("List Test:\n"); - int element_array[kNumberOfElements]; - for (int i = 0; i < kNumberOfElements; i++) - { - element_array[i] = i; - } - // Test PushBack 1 - ListWrapper test_list; - for (int i = 2; i < kNumberOfElements - 2; i++) - { - FailTest(test_list.PushBack((void*)&element_array[i]) != 0); - } - // Test PushBack 2 - FailTest(test_list.PushBack(element_array[kNumberOfElements - 2]) != 0); - FailTest(test_list.PushBack(element_array[kNumberOfElements - 1]) != 0); - // Test PushFront 2 - FailTest(test_list.PushFront(element_array[1]) != 0); - // Test PushFront 1 - FailTest(test_list.PushFront((void*)&element_array[0]) != 0); - // Test GetSize - FailTest(test_list.GetSize() != kNumberOfElements); - PrintList(test_list); - //Test PopFront - FailTest(test_list.PopFront() != 0); - //Test PopBack - FailTest(test_list.PopBack() != 0); - // Test GetSize - FailTest(test_list.GetSize() != kNumberOfElements - 2); - // Test Empty - FailTest(test_list.Empty()); - // Test First - ListItem* first_item = test_list.First(); - FailTest(first_item == NULL); - // Test Last - ListItem* last_item = test_list.Last(); - FailTest(last_item == NULL); - // Test Next - ListItem* second_item = test_list.Next(first_item); - FailTest(second_item == NULL); - FailTest(test_list.Next(last_item) != NULL); - FailTest(test_list.Next(NULL) != NULL); - // Test Previous - ListItem* second_to_last_item = test_list.Previous(last_item); - FailTest(second_to_last_item == NULL); - FailTest(test_list.Previous(first_item) != NULL); - FailTest(test_list.Previous(NULL) != NULL); - // Test GetUnsignedItem - FailTest(last_item->GetUnsignedItem() != - kNumberOfElements - 2); - FailTest(last_item->GetItem() != - NULL); - // Test GetItem - FailTest(GetStoredIntegerValue(second_to_last_item) != - kNumberOfElements - 3); - FailTest(second_to_last_item->GetUnsignedItem() != 0); - // Pop last and first since they are pushed as unsigned items. - FailTest(test_list.PopFront() != 0); - FailTest(test_list.PopBack() != 0); - // Test Insert. Please note that old iterators are no longer valid at - // this point. 
- ListItem* insert_item_last = new ListItem(reinterpret_cast(&element_array[kNumberOfElements - 2])); - FailTest(test_list.Insert(test_list.Last(),insert_item_last) != 0); - FailTest(test_list.Insert(NULL,insert_item_last) == 0); - ListItem* insert_item_last2 = new ListItem(reinterpret_cast(&element_array[kNumberOfElements - 2])); - FailTest(test_list.Insert(insert_item_last2,NULL) == 0); - // test InsertBefore - ListItem* insert_item_first = new ListItem(reinterpret_cast(&element_array[1])); - FailTest(test_list.InsertBefore(test_list.First(),insert_item_first) != 0); - FailTest(test_list.InsertBefore(NULL,insert_item_first) == 0); - ListItem* insert_item_first2 = new ListItem(reinterpret_cast(&element_array[1])); - FailTest(test_list.InsertBefore(insert_item_first2,NULL) == 0); - PrintList(test_list); - ListSanity(test_list); - // Erase the whole list - int counter = 0; - while (test_list.PopFront() == 0) - { - FailTest(counter++ > kNumberOfElements); - } - PrintList(test_list); - // Test APIs when list is empty - FailTest(test_list.GetSize() != 0); - FailTest(test_list.PopFront() != -1); - FailTest(test_list.PopBack() != -1); - FailTest(!test_list.Empty()); - FailTest(test_list.First() != NULL); - FailTest(test_list.Last() != NULL); - FailTest(test_list.Next(NULL) != NULL); - FailTest(test_list.Previous(NULL) != NULL); - FailTest(test_list.Erase(NULL) != -1); - // Test Insert APIs when list is empty - ListItem* new_item = new ListItem(reinterpret_cast(&element_array[0])); - FailTest(test_list.Insert(NULL,new_item) != 0); - FailTest(test_list.Empty()); - FailTest(test_list.PopFront() != 0); - ListItem* new_item2 = new ListItem(reinterpret_cast(&element_array[0])); - FailTest(test_list.InsertBefore(NULL,new_item2) != 0); - FailTest(test_list.Empty()); - - printf("Tests passed successfully!\n"); -} diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/channel_transport.cc b/media/webrtc/trunk/webrtc/test/channel_transport/channel_transport.cc index 9500c9030196..2bf02e95da3f 100644 --- a/media/webrtc/trunk/webrtc/test/channel_transport/channel_transport.cc +++ b/media/webrtc/trunk/webrtc/test/channel_transport/channel_transport.cc @@ -105,7 +105,8 @@ void VideoChannelTransport::IncomingRTPPacket( const int32_t packet_length, const char* /*from_ip*/, const uint16_t /*from_port*/) { - vie_network_->ReceivedRTPPacket(channel_, incoming_rtp_packet, packet_length); + vie_network_->ReceivedRTPPacket( + channel_, incoming_rtp_packet, packet_length, PacketTime()); } void VideoChannelTransport::IncomingRTCPPacket( diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.cc b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.cc index 54bd42f20516..bfab51e6c4d5 100644 --- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.cc +++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.cc @@ -65,14 +65,11 @@ UdpSocket2ManagerWindows::~UdpSocket2ManagerWindows() } StopWorkerThreads(); - // All threads are stopped. Safe to delete them. - ListItem* pItem = NULL; - while((pItem = _workerThreadsList.First()) != NULL) - { - delete static_cast(pItem->GetItem()); - _workerThreadsList.PopFront(); + for (WorkerList::iterator iter = _workerThreadsList.begin(); + iter != _workerThreadsList.end(); ++iter) { + delete *iter; } - + _workerThreadsList.clear(); _ioContextPool.Free(); _numOfActiveManagers--; @@ -134,14 +131,10 @@ bool UdpSocket2ManagerWindows::Start() // Start worker threads. 
_stopped = false; int32_t error = 0; - ListItem* pItem = _workerThreadsList.First(); - UdpSocket2WorkerWindows* pWorker; - while(pItem != NULL && !error) - { - pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem(); - if(!pWorker->Start()) - error = 1; - pItem = _workerThreadsList.Next(pItem); + for (WorkerList::iterator iter = _workerThreadsList.begin(); + iter != _workerThreadsList.end() && !error; ++iter) { + if(!(*iter)->Start()) + error = 1; } if(error) { @@ -194,7 +187,7 @@ bool UdpSocket2ManagerWindows::StartWorkerThreads() delete pWorker; break; } - _workerThreadsList.PushFront(pWorker); + _workerThreadsList.push_front(pWorker); i++; } if(error) @@ -207,12 +200,11 @@ bool UdpSocket2ManagerWindows::StartWorkerThreads() "creating work threads", _managerNumber); // Delete worker threads. - ListItem* pItem = NULL; - while((pItem = _workerThreadsList.First()) != NULL) - { - delete static_cast(pItem->GetItem()); - _workerThreadsList.PopFront(); + for (WorkerList::iterator iter = _workerThreadsList.begin(); + iter != _workerThreadsList.end(); ++iter) { + delete *iter; } + _workerThreadsList.clear(); _pCrit->Leave(); return false; } @@ -281,38 +273,30 @@ bool UdpSocket2ManagerWindows::StopWorkerThreads() threadsStoped, numActicve Sockets=%d", _managerNumber, _numActiveSockets); - UdpSocket2WorkerWindows* pWorker; - ListItem* pItem = _workerThreadsList.First(); // Set worker threads to not alive so that they will stop calling // UdpSocket2WorkerWindows::Run(). - while(pItem != NULL) - { - pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem(); - pWorker->SetNotAlive(); - pItem = _workerThreadsList.Next(pItem); + for (WorkerList::iterator iter = _workerThreadsList.begin(); + iter != _workerThreadsList.end(); ++iter) { + (*iter)->SetNotAlive(); } // Release all threads waiting for GetQueuedCompletionStatus(..). 
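    // Reviewer aside, not in the patch: the loop below is the standard IOCP
    // shutdown idiom. Every worker blocks inside GetQueuedCompletionStatus(),
    // so the manager posts one dummy zero-byte completion packet per worker;
    // each woken thread receives a NULL OVERLAPPED pointer and can notice the
    // shutdown flag set by SetNotAlive() above instead of processing I/O.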
     if(_ioCompletionHandle)
     {
         uint32_t i = 0;
-        for(i = 0; i < _workerThreadsList.GetSize(); i++)
+        for(i = 0; i < _workerThreadsList.size(); i++)
         {
             PostQueuedCompletionStatus(_ioCompletionHandle, 0 ,0 , NULL);
         }
     }
-    pItem = _workerThreadsList.First();
-
-    while(pItem != NULL)
-    {
-        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
-        if(pWorker->Stop() == false)
+    for (WorkerList::iterator iter = _workerThreadsList.begin();
+         iter != _workerThreadsList.end(); ++iter) {
+        if((*iter)->Stop() == false)
         {
             error = -1;
             WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
                          "failed to stop worker thread");
         }
-        pItem = _workerThreadsList.Next(pItem);
     }
     if(error)
diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.h b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.h
index 0eef997fde32..956cc0303196 100644
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.h
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_manager_win.h
@@ -12,11 +12,11 @@
 #define WEBRTC_TEST_CHANNEL_TRANSPORT_UDP_SOCKET2_MANAGER_WINDOWS_H_
 
 #include <winsock2.h>
+#include <list>
 
 #include "webrtc/system_wrappers/interface/atomic32.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
 #include "webrtc/test/channel_transport/udp_socket2_win.h"
 #include "webrtc/test/channel_transport/udp_socket_manager_wrapper.h"
@@ -91,6 +91,27 @@ private:
     Atomic32 _inUse;
 };
 
+class UdpSocket2WorkerWindows
+{
+public:
+    UdpSocket2WorkerWindows(HANDLE ioCompletionHandle);
+    virtual ~UdpSocket2WorkerWindows();
+
+    virtual bool Start();
+    virtual bool Stop();
+    virtual int32_t Init();
+    virtual void SetNotAlive();
+protected:
+    static bool Run(ThreadObj obj);
+    bool Process();
+private:
+    HANDLE _ioCompletionHandle;
+    ThreadWrapper* _pThread;
+    static int32_t _numOfWorkers;
+    int32_t _workerNumber;
+    volatile bool _stop;
+    bool _init;
+};
 class UdpSocket2ManagerWindows : public UdpSocketManager
 {
@@ -115,6 +136,7 @@ public:
     int32_t PushIoContext(PerIoContext* pIoContext);
 
 private:
+    typedef std::list<UdpSocket2WorkerWindows*> WorkerList;
     bool StopWorkerThreads();
     bool StartWorkerThreads();
    bool AddSocketPrv(UdpSocket2Windows* s);
@@ -129,35 +151,13 @@ private:
     volatile bool _stopped;
     bool _init;
     int32_t _numActiveSockets;
-    ListWrapper _workerThreadsList;
+    WorkerList _workerThreadsList;
     EventWrapper* _event;
     HANDLE _ioCompletionHandle;
     IoContextPool _ioContextPool;
 };
 
-class UdpSocket2WorkerWindows
-{
-public:
-    UdpSocket2WorkerWindows(HANDLE ioCompletionHandle);
-    virtual ~UdpSocket2WorkerWindows();
-
-    virtual bool Start();
-    virtual bool Stop();
-    virtual int32_t Init();
-    virtual void SetNotAlive();
-protected:
-    static bool Run(ThreadObj obj);
-    bool Process();
-private:
-    HANDLE _ioCompletionHandle;
-    ThreadWrapper*_pThread;
-    static int32_t _numOfWorkers;
-    int32_t _workerNumber;
-    volatile bool _stop;
-    bool _init;
-};
-
 } // namespace test
 } // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_win.h b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_win.h
index 0bc3581ead99..e4d6b8c23f2a 100644
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_win.h
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket2_win.h
@@ -23,7 +23,6 @@
 #include "webrtc/system_wrappers/interface/condition_variable_wrapper.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 #include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 #include "webrtc/test/channel_transport/udp_socket2_manager_win.h"
diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.cc b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.cc
index 5543814b36a4..1e13475ac4d7 100644
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.cc
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.cc
@@ -322,7 +322,7 @@ bool UdpSocketManagerPosixImpl::AddSocket(UdpSocketWrapper* s)
         return false;
     }
     _critSectList->Enter();
-    _addList.PushBack(s);
+    _addList.push_back(s);
     _critSectList->Leave();
     return true;
 }
@@ -333,26 +333,24 @@ bool UdpSocketManagerPosixImpl::RemoveSocket(UdpSocketWrapper* s)
     _critSectList->Enter();
 
     // If the socket is in the add list it's safe to remove and delete it.
-    ListItem* addListItem = _addList.First();
-    while(addListItem)
-    {
-        UdpSocketPosix* addSocket = (UdpSocketPosix*)addListItem->GetItem();
+    for (SocketList::iterator iter = _addList.begin();
+         iter != _addList.end(); ++iter) {
+        UdpSocketPosix* addSocket = static_cast<UdpSocketPosix*>(*iter);
         unsigned int addFD = addSocket->GetFd();
         unsigned int removeFD = static_cast<UdpSocketPosix*>(s)->GetFd();
         if(removeFD == addFD)
         {
-            _removeList.PushBack(removeFD);
+            _removeList.push_back(removeFD);
             _critSectList->Leave();
             return true;
         }
-        addListItem = _addList.Next(addListItem);
     }
 
     // Checking the socket map is safe since all Erase and Insert calls to this
     // map are also protected by _critSectList.
     if (_socketMap.find(static_cast<UdpSocketPosix*>(s)->GetFd()) !=
         _socketMap.end()) {
-        _removeList.PushBack(static_cast<UdpSocketPosix*>(s)->GetFd());
+        _removeList.push_back(static_cast<UdpSocketPosix*>(s)->GetFd());
         _critSectList->Leave();
         return true;
     }
@@ -364,25 +362,23 @@ void UdpSocketManagerPosixImpl::UpdateSocketMap()
 {
     // Remove items in remove list.
     _critSectList->Enter();
-    while(!_removeList.Empty())
-    {
+    for (FdList::iterator iter = _removeList.begin();
+         iter != _removeList.end(); ++iter) {
         UdpSocketPosix* deleteSocket = NULL;
-        SOCKET removeFD = _removeList.First()->GetUnsignedItem();
+        SOCKET removeFD = *iter;
 
         // If the socket is in the add list it hasn't been added to the socket
         // map yet. Just remove the socket from the add list.
-        ListItem* addListItem = _addList.First();
-        while(addListItem)
-        {
-            UdpSocketPosix* addSocket = (UdpSocketPosix*)addListItem->GetItem();
+        for (SocketList::iterator iter = _addList.begin();
+             iter != _addList.end(); ++iter) {
+            UdpSocketPosix* addSocket = static_cast<UdpSocketPosix*>(*iter);
             SOCKET addFD = addSocket->GetFd();
             if(removeFD == addFD)
            {
                 deleteSocket = addSocket;
-                _addList.Erase(addListItem);
+                _addList.erase(iter);
                 break;
             }
-            addListItem = _addList.Next(addListItem);
         }
 
         // Find and remove socket from _socketMap.
@@ -398,19 +394,18 @@ void UdpSocketManagerPosixImpl::UpdateSocketMap()
             deleteSocket->ReadyForDeletion();
             delete deleteSocket;
         }
-        _removeList.PopFront();
     }
+    _removeList.clear();
 
     // Add sockets from add list.
-    while(!_addList.Empty())
-    {
-        UdpSocketPosix* s =
-            static_cast<UdpSocketPosix*>(_addList.First()->GetItem());
+    for (SocketList::iterator iter = _addList.begin();
+         iter != _addList.end(); ++iter) {
+        UdpSocketPosix* s = static_cast<UdpSocketPosix*>(*iter);
         if(s)
         {
             _socketMap[s->GetFd()] = s;
         }
-        _addList.PopFront();
     }
+    _addList.clear();
     _critSectList->Leave();
 }
diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.h b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.h
index 0efd6ffa7865..cbcb47967fbc 100644
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.h
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_posix.h
@@ -14,10 +14,10 @@
 
 #include <sys/types.h>
 #include <unistd.h>
+#include <list>
 #include <map>
 
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
 #include "webrtc/test/channel_transport/udp_socket_manager_wrapper.h"
 #include "webrtc/test/channel_transport/udp_socket_wrapper.h"
@@ -74,14 +74,16 @@ protected:
     void UpdateSocketMap();
 
 private:
+    typedef std::list<UdpSocketWrapper*> SocketList;
+    typedef std::list<SOCKET> FdList;
     ThreadWrapper* _thread;
     CriticalSectionWrapper* _critSectList;
     fd_set _readFds;
 
     std::map<SOCKET, UdpSocketPosix*> _socketMap;
-    ListWrapper _addList;
-    ListWrapper _removeList;
+    SocketList _addList;
+    FdList _removeList;
 };
 
 } // namespace test
diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_unittest.cc b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_unittest.cc
index eccd8ea0265d..d8e66b939564 100644
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_unittest.cc
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_socket_manager_unittest.cc
@@ -23,10 +23,6 @@
 namespace webrtc {
 namespace test {
 
-// Disable for TSan v2, see
-// https://code.google.com/p/webrtc/issues/detail?id=2334 for details.
-#if !defined(THREAD_SANITIZER)
-
 TEST(UdpSocketManager, CreateCallsInitAndDoesNotLeakMemory) {
   int32_t id = 42;
   uint8_t threads = 1;
@@ -84,7 +80,5 @@ TEST(UdpSocketManager, UnremovedSocketsGetCollectedAtManagerDeletion) {
 #endif
 }
 
-#endif  // if !defined(THREAD_SANITIZER)
-
 }  // namespace test
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_unittest.cc b/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_unittest.cc
index 8ed204a28e8a..5ccc9ef2b848 100644
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_unittest.cc
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_unittest.cc
@@ -108,10 +108,6 @@ private:
   std::vector sockets_created_;
 };
 
-// Disable for TSan v2, see
-// https://code.google.com/p/webrtc/issues/detail?id=2334 for details.
-#if !defined(THREAD_SANITIZER)
-
 TEST_F(UDPTransportTest, CreateTransport) {
   int32_t id = 0;
   uint8_t threads = 1;
@@ -119,8 +115,6 @@ TEST_F(UDPTransportTest, CreateTransport) {
   UdpTransport::Destroy(transport);
 }
 
-#endif  // if !defined(THREAD_SANITIZER)
-
 // This test verifies that the mock_socket is not called from the constructor.
TEST_F(UDPTransportTest, ConstructorDoesNotCreateSocket) { int32_t id = 0; diff --git a/media/webrtc/trunk/webrtc/common_unittest.cc b/media/webrtc/trunk/webrtc/test/common_unittest.cc similarity index 100% rename from media/webrtc/trunk/webrtc/common_unittest.cc rename to media/webrtc/trunk/webrtc/test/common_unittest.cc diff --git a/media/webrtc/trunk/webrtc/test/configurable_frame_size_encoder.cc b/media/webrtc/trunk/webrtc/test/configurable_frame_size_encoder.cc new file mode 100644 index 000000000000..b246da357544 --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/configurable_frame_size_encoder.cc @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/test/configurable_frame_size_encoder.h" + +#include + +#include "webrtc/common_video/interface/video_image.h" +#include "testing/gtest/include/gtest/gtest.h" + +namespace webrtc { +namespace test { + +ConfigurableFrameSizeEncoder::ConfigurableFrameSizeEncoder( + uint32_t max_frame_size) + : callback_(NULL), + max_frame_size_(max_frame_size), + current_frame_size_(max_frame_size), + buffer_(new uint8_t[max_frame_size]) { + memset(buffer_.get(), 0, max_frame_size); +} + +ConfigurableFrameSizeEncoder::~ConfigurableFrameSizeEncoder() {} + +int32_t ConfigurableFrameSizeEncoder::InitEncode( + const VideoCodec* codec_settings, + int32_t number_of_cores, + uint32_t max_payload_size) { + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::Encode( + const I420VideoFrame& inputImage, + const CodecSpecificInfo* codecSpecificInfo, + const std::vector* frame_types) { + EncodedImage encodedImage( + buffer_.get(), current_frame_size_, max_frame_size_); + encodedImage._completeFrame = true; + encodedImage._encodedHeight = inputImage.height(); + encodedImage._encodedWidth = inputImage.width(); + encodedImage._frameType = kKeyFrame; + encodedImage._timeStamp = inputImage.timestamp(); + encodedImage.capture_time_ms_ = inputImage.render_time_ms(); + RTPFragmentationHeader* fragmentation = NULL; + CodecSpecificInfo specific; + memset(&specific, 0, sizeof(specific)); + callback_->Encoded(encodedImage, &specific, fragmentation); + + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) { + callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::Release() { + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::SetChannelParameters(uint32_t packet_loss, + int rtt) { + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::SetRates(uint32_t new_bit_rate, + uint32_t frame_rate) { + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::SetPeriodicKeyFrames(bool enable) { + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::CodecConfigParameters(uint8_t* buffer, + int32_t size) { + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t ConfigurableFrameSizeEncoder::SetFrameSize(uint32_t size) { + assert(size <= max_frame_size_); + current_frame_size_ = size; + return WEBRTC_VIDEO_CODEC_OK; +} + +} // namespace test +} // namespace webrtc diff --git 
a/media/webrtc/trunk/webrtc/test/configurable_frame_size_encoder.h b/media/webrtc/trunk/webrtc/test/configurable_frame_size_encoder.h new file mode 100644 index 000000000000..f29038fac75a --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/configurable_frame_size_encoder.h @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_ +#define WEBRTC_TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_ + +#include + +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" + +namespace webrtc { +namespace test { + +class ConfigurableFrameSizeEncoder : public VideoEncoder { + public: + explicit ConfigurableFrameSizeEncoder(uint32_t max_frame_size); + virtual ~ConfigurableFrameSizeEncoder(); + + virtual int32_t InitEncode(const VideoCodec* codec_settings, + int32_t number_of_cores, + uint32_t max_payload_size) OVERRIDE; + + virtual int32_t Encode(const I420VideoFrame& input_image, + const CodecSpecificInfo* codec_specific_info, + const std::vector* frame_types) + OVERRIDE; + + virtual int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback) + OVERRIDE; + + virtual int32_t Release() OVERRIDE; + + virtual int32_t SetChannelParameters(uint32_t packet_loss, int rtt) OVERRIDE; + + virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) OVERRIDE; + + virtual int32_t SetPeriodicKeyFrames(bool enable) OVERRIDE; + + virtual int32_t CodecConfigParameters(uint8_t* buffer, int32_t size) OVERRIDE; + + int32_t SetFrameSize(uint32_t size); + + private: + EncodedImageCallback* callback_; + const uint32_t max_frame_size_; + uint32_t current_frame_size_; + scoped_ptr buffer_; +}; + +} // namespace test +} // namespace webrtc + +#endif // WEBRTC_TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_ diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/direct_transport.cc b/media/webrtc/trunk/webrtc/test/direct_transport.cc similarity index 54% rename from media/webrtc/trunk/webrtc/video_engine/test/common/direct_transport.cc rename to media/webrtc/trunk/webrtc/test/direct_transport.cc index f4f364b14fb3..96852ed9c08d 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/direct_transport.cc +++ b/media/webrtc/trunk/webrtc/test/direct_transport.cc @@ -7,10 +7,12 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
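// Reviewer aside, not part of the patch: the ConfigurableFrameSizeEncoder
// added above lets a test emit encoded frames of an exact byte size, which
// is handy for exercising packetization boundaries. A hedged sketch of how a
// test might drive it; the payload limit is an illustrative assumption, and
// the encoder is assumed to have been constructed with a max_frame_size of
// at least kAssumedMaxPayloadBytes + 1:

#include "webrtc/test/configurable_frame_size_encoder.h"

// Sweep the encoded size across an assumed RTP payload limit so the receive
// side sees both single-packet and multi-packet frames.
void SweepFrameSizes(webrtc::test::ConfigurableFrameSizeEncoder* encoder,
                     const webrtc::I420VideoFrame& frame) {
  const uint32_t kAssumedMaxPayloadBytes = 1460;  // Illustrative value.
  for (uint32_t size = kAssumedMaxPayloadBytes - 1;
       size <= kAssumedMaxPayloadBytes + 1; ++size) {
    encoder->SetFrameSize(size);         // Next Encode() emits |size| bytes.
    encoder->Encode(frame, NULL, NULL);  // Registered callback gets the frame.
  }
}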
*/ -#include "webrtc/video_engine/test/common/direct_transport.h" +#include "webrtc/test/direct_transport.h" #include "testing/gtest/include/gtest/gtest.h" -#include "webrtc/video_engine/new_include/call.h" + +#include "webrtc/call.h" +#include "webrtc/system_wrappers/interface/clock.h" namespace webrtc { namespace test { @@ -19,8 +21,21 @@ DirectTransport::DirectTransport() : lock_(CriticalSectionWrapper::CreateCriticalSection()), packet_event_(EventWrapper::Create()), thread_(ThreadWrapper::CreateThread(NetworkProcess, this)), + clock_(Clock::GetRealTimeClock()), shutting_down_(false), - receiver_(NULL) { + fake_network_(FakeNetworkPipe::Config()) { + unsigned int thread_id; + EXPECT_TRUE(thread_->Start(thread_id)); +} + +DirectTransport::DirectTransport( + const FakeNetworkPipe::Config& config) + : lock_(CriticalSectionWrapper::CreateCriticalSection()), + packet_event_(EventWrapper::Create()), + thread_(ThreadWrapper::CreateThread(NetworkProcess, this)), + clock_(Clock::GetRealTimeClock()), + shutting_down_(false), + fake_network_(config) { unsigned int thread_id; EXPECT_TRUE(thread_->Start(thread_id)); } @@ -38,32 +53,19 @@ void DirectTransport::StopSending() { } void DirectTransport::SetReceiver(PacketReceiver* receiver) { - receiver_ = receiver; + fake_network_.SetReceiver(receiver); } -bool DirectTransport::SendRTP(const uint8_t* data, size_t length) { - QueuePacket(data, length); - return true; -} - -bool DirectTransport::SendRTCP(const uint8_t* data, size_t length) { - QueuePacket(data, length); - return true; -} - -DirectTransport::Packet::Packet() : length(0) {} - -DirectTransport::Packet::Packet(const uint8_t* data, size_t length) - : length(length) { - EXPECT_LE(length, sizeof(this->data)); - memcpy(this->data, data, length); -} - -void DirectTransport::QueuePacket(const uint8_t* data, size_t length) { - CriticalSectionScoped crit(lock_.get()); - EXPECT_TRUE(receiver_ != NULL); - packet_queue_.push_back(Packet(data, length)); +bool DirectTransport::SendRtp(const uint8_t* data, size_t length) { + fake_network_.SendPacket(data, length); packet_event_->Set(); + return true; +} + +bool DirectTransport::SendRtcp(const uint8_t* data, size_t length) { + fake_network_.SendPacket(data, length); + packet_event_->Set(); + return true; } bool DirectTransport::NetworkProcess(void* transport) { @@ -71,29 +73,20 @@ bool DirectTransport::NetworkProcess(void* transport) { } bool DirectTransport::SendPackets() { - while (true) { - Packet p; - { - CriticalSectionScoped crit(lock_.get()); - if (packet_queue_.empty()) + fake_network_.Process(); + int wait_time_ms = fake_network_.TimeUntilNextProcess(); + if (wait_time_ms > 0) { + switch (packet_event_->Wait(wait_time_ms)) { + case kEventSignaled: + packet_event_->Reset(); break; - p = packet_queue_.front(); - packet_queue_.pop_front(); + case kEventTimeout: + break; + case kEventError: + // TODO(pbos): Log a warning here? + return true; } - receiver_->DeliverPacket(p.data, p.length); } - - switch (packet_event_->Wait(WEBRTC_EVENT_INFINITE)) { - case kEventSignaled: - packet_event_->Reset(); - break; - case kEventTimeout: - break; - case kEventError: - // TODO(pbos): Log a warning here? - return true; - } - CriticalSectionScoped crit(lock_.get()); return shutting_down_ ? 
false : true; } diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/direct_transport.h b/media/webrtc/trunk/webrtc/test/direct_transport.h similarity index 76% rename from media/webrtc/trunk/webrtc/video_engine/test/common/direct_transport.h rename to media/webrtc/trunk/webrtc/test/direct_transport.h index f1f3dda89e54..660ffecc2ee9 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/direct_transport.h +++ b/media/webrtc/trunk/webrtc/test/direct_transport.h @@ -18,10 +18,12 @@ #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" #include "webrtc/system_wrappers/interface/thread_wrapper.h" -#include "webrtc/video_engine/new_include/transport.h" +#include "webrtc/test/fake_network_pipe.h" +#include "webrtc/transport.h" namespace webrtc { +class Clock; class PacketReceiver; namespace test { @@ -29,36 +31,27 @@ namespace test { class DirectTransport : public newapi::Transport { public: DirectTransport(); + explicit DirectTransport(const FakeNetworkPipe::Config& config); ~DirectTransport(); virtual void StopSending(); virtual void SetReceiver(PacketReceiver* receiver); - virtual bool SendRTP(const uint8_t* data, size_t length) OVERRIDE; - virtual bool SendRTCP(const uint8_t* data, size_t length) OVERRIDE; + virtual bool SendRtp(const uint8_t* data, size_t length) OVERRIDE; + virtual bool SendRtcp(const uint8_t* data, size_t length) OVERRIDE; private: - struct Packet { - Packet(); - Packet(const uint8_t* data, size_t length); - - uint8_t data[1500]; - size_t length; - }; - - void QueuePacket(const uint8_t* data, size_t length); - static bool NetworkProcess(void* transport); bool SendPackets(); scoped_ptr lock_; scoped_ptr packet_event_; scoped_ptr thread_; + Clock* clock_; bool shutting_down_; - std::deque packet_queue_; - PacketReceiver* receiver_; + FakeNetworkPipe fake_network_; }; } // namespace test } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/test/fake_audio_device.cc b/media/webrtc/trunk/webrtc/test/fake_audio_device.cc new file mode 100644 index 000000000000..a6fe165b22f5 --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/fake_audio_device.cc @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/test/fake_audio_device.h" + +#include + +#include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/modules/media_file/source/media_file_utility.h" +#include "webrtc/system_wrappers/interface/clock.h" +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/system_wrappers/interface/event_wrapper.h" +#include "webrtc/system_wrappers/interface/file_wrapper.h" +#include "webrtc/system_wrappers/interface/thread_wrapper.h" + +namespace webrtc { +namespace test { + +FakeAudioDevice::FakeAudioDevice(Clock* clock, const std::string& filename) + : audio_callback_(NULL), + capturing_(false), + captured_audio_(), + playout_buffer_(), + last_playout_ms_(-1), + clock_(clock), + tick_(EventWrapper::Create()), + lock_(CriticalSectionWrapper::CreateCriticalSection()), + file_utility_(new ModuleFileUtility(0)), + input_stream_(FileWrapper::Create()) { + memset(captured_audio_, 0, sizeof(captured_audio_)); + memset(playout_buffer_, 0, sizeof(playout_buffer_)); + // Open audio input file as read-only and looping. + EXPECT_EQ(0, input_stream_->OpenFile(filename.c_str(), true, true)) + << filename; +} + +FakeAudioDevice::~FakeAudioDevice() { + Stop(); + + if (thread_.get() != NULL) + thread_->Stop(); +} + +int32_t FakeAudioDevice::Init() { + CriticalSectionScoped cs(lock_.get()); + if (file_utility_->InitPCMReading(*input_stream_.get()) != 0) + return -1; + + if (!tick_->StartTimer(true, 10)) + return -1; + thread_.reset(ThreadWrapper::CreateThread( + FakeAudioDevice::Run, this, webrtc::kHighPriority, "FakeAudioDevice")); + if (thread_.get() == NULL) + return -1; + unsigned int thread_id; + if (!thread_->Start(thread_id)) { + thread_.reset(); + return -1; + } + return 0; +} + +int32_t FakeAudioDevice::RegisterAudioCallback(AudioTransport* callback) { + CriticalSectionScoped cs(lock_.get()); + audio_callback_ = callback; + return 0; +} + +bool FakeAudioDevice::Playing() const { + CriticalSectionScoped cs(lock_.get()); + return capturing_; +} + +int32_t FakeAudioDevice::PlayoutDelay(uint16_t* delay_ms) const { + *delay_ms = 0; + return 0; +} + +bool FakeAudioDevice::Recording() const { + CriticalSectionScoped cs(lock_.get()); + return capturing_; +} + +bool FakeAudioDevice::Run(void* obj) { + static_cast(obj)->CaptureAudio(); + return true; +} + +void FakeAudioDevice::CaptureAudio() { + { + CriticalSectionScoped cs(lock_.get()); + if (capturing_) { + int bytes_read = file_utility_->ReadPCMData( + *input_stream_.get(), captured_audio_, kBufferSizeBytes); + if (bytes_read <= 0) + return; + int num_samples = bytes_read / 2; // 2 bytes per sample. 
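      // Reviewer aside, not in the patch: with kFrequencyHz = 16000 and the
      // 10 ms timer armed in Init(), the buffer arithmetic here works out to:
      //   samples per 10 ms tick = 16000 / 100          = 160 samples
      //   bytes per tick         = 160 samples * 2      = 320 bytes
      //   kBufferSizeBytes       = 2 * 16000 = 32000    = one full second of
      //                                                   16-bit mono audio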
+ uint32_t new_mic_level; + EXPECT_EQ(0, + audio_callback_->RecordedDataIsAvailable(captured_audio_, + num_samples, + 2, + 1, + kFrequencyHz, + 0, + 0, + 0, + false, + new_mic_level)); + uint32_t samples_needed = kFrequencyHz / 100; + int64_t now_ms = clock_->TimeInMilliseconds(); + uint32_t time_since_last_playout_ms = now_ms - last_playout_ms_; + if (last_playout_ms_ > 0 && time_since_last_playout_ms > 0) + samples_needed = std::min(kFrequencyHz / time_since_last_playout_ms, + kBufferSizeBytes / 2); + uint32_t samples_out = 0; + EXPECT_EQ(0, + audio_callback_->NeedMorePlayData(samples_needed, + 2, + 1, + kFrequencyHz, + playout_buffer_, + samples_out)); + } + } + tick_->Wait(WEBRTC_EVENT_INFINITE); +} + +void FakeAudioDevice::Start() { + CriticalSectionScoped cs(lock_.get()); + capturing_ = true; +} + +void FakeAudioDevice::Stop() { + CriticalSectionScoped cs(lock_.get()); + capturing_ = false; +} +} // namespace test +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/test/fake_audio_device.h b/media/webrtc/trunk/webrtc/test/fake_audio_device.h new file mode 100644 index 000000000000..40a7547dfd68 --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/fake_audio_device.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef WEBRTC_TEST_FAKE_AUDIO_DEVICE_H_ +#define WEBRTC_TEST_FAKE_AUDIO_DEVICE_H_ + +#include + +#include "webrtc/modules/audio_device/include/fake_audio_device.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/typedefs.h" + +namespace webrtc { + +class Clock; +class CriticalSectionWrapper; +class EventWrapper; +class FileWrapper; +class ModuleFileUtility; +class ThreadWrapper; + +namespace test { + +class FakeAudioDevice : public FakeAudioDeviceModule { + public: + FakeAudioDevice(Clock* clock, const std::string& filename); + + virtual ~FakeAudioDevice(); + + virtual int32_t Init() OVERRIDE; + virtual int32_t RegisterAudioCallback(AudioTransport* callback) OVERRIDE; + + virtual bool Playing() const OVERRIDE; + virtual int32_t PlayoutDelay(uint16_t* delay_ms) const OVERRIDE; + virtual bool Recording() const OVERRIDE; + + void Start(); + void Stop(); + + private: + static bool Run(void* obj); + void CaptureAudio(); + + static const uint32_t kFrequencyHz = 16000; + static const uint32_t kBufferSizeBytes = 2 * kFrequencyHz; + + AudioTransport* audio_callback_; + bool capturing_; + int8_t captured_audio_[kBufferSizeBytes]; + int8_t playout_buffer_[kBufferSizeBytes]; + int64_t last_playout_ms_; + + Clock* clock_; + scoped_ptr tick_; + scoped_ptr lock_; + scoped_ptr thread_; + scoped_ptr file_utility_; + scoped_ptr input_stream_; +}; +} // namespace test +} // namespace webrtc + +#endif // WEBRTC_TEST_FAKE_AUDIO_DEVICE_H_ diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/fake_decoder.cc b/media/webrtc/trunk/webrtc/test/fake_decoder.cc similarity index 96% rename from media/webrtc/trunk/webrtc/video_engine/test/common/fake_decoder.cc rename to media/webrtc/trunk/webrtc/test/fake_decoder.cc index 8125e5f399d2..a9e6f50e16af 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/fake_decoder.cc +++ b/media/webrtc/trunk/webrtc/test/fake_decoder.cc 
@@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/video_engine/test/common/fake_decoder.h" +#include "webrtc/test/fake_decoder.h" #include "testing/gtest/include/gtest/gtest.h" diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/fake_decoder.h b/media/webrtc/trunk/webrtc/test/fake_decoder.h similarity index 100% rename from media/webrtc/trunk/webrtc/video_engine/test/common/fake_decoder.h rename to media/webrtc/trunk/webrtc/test/fake_decoder.h diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/fake_encoder.cc b/media/webrtc/trunk/webrtc/test/fake_encoder.cc similarity index 94% rename from media/webrtc/trunk/webrtc/video_engine/test/common/fake_encoder.cc rename to media/webrtc/trunk/webrtc/test/fake_encoder.cc index c5e58f53b436..f4e5227ed6c0 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/fake_encoder.cc +++ b/media/webrtc/trunk/webrtc/test/fake_encoder.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/video_engine/test/common/fake_encoder.h" +#include "webrtc/test/fake_encoder.h" #include "testing/gtest/include/gtest/gtest.h" @@ -20,13 +20,16 @@ FakeEncoder::FakeEncoder(Clock* clock) callback_(NULL), target_bitrate_kbps_(0), last_encode_time_ms_(0) { - memset(encoded_buffer_, 0, sizeof(encoded_buffer_)); + // Generate some arbitrary not-all-zero data + for (size_t i = 0; i < sizeof(encoded_buffer_); ++i) { + encoded_buffer_[i] = static_cast(i); + } } FakeEncoder::~FakeEncoder() {} void FakeEncoder::SetCodecSettings(VideoCodec* codec, - size_t num_streams) { + size_t num_streams) { assert(num_streams > 0); assert(num_streams <= kMaxSimulcastStreams); @@ -57,7 +60,6 @@ void FakeEncoder::SetCodecSettings(VideoCodec* codec, codec->codecType = kVideoCodecGeneric; strcpy(codec->plName, "FAKE"); - codec->plType = 125; } int32_t FakeEncoder::InitEncode(const VideoCodec* config, @@ -100,9 +102,8 @@ int32_t FakeEncoder::Encode( int stream_bits = (bits_available > max_stream_bits) ? max_stream_bits : bits_available; int stream_bytes = (stream_bits + 7) / 8; - EXPECT_LT(static_cast(stream_bytes), sizeof(encoded_buffer_)); if (static_cast(stream_bytes) > sizeof(encoded_buffer_)) - return -1; + stream_bytes = sizeof(encoded_buffer_); EncodedImage encoded( encoded_buffer_, stream_bytes, sizeof(encoded_buffer_)); diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/fake_encoder.h b/media/webrtc/trunk/webrtc/test/fake_encoder.h similarity index 100% rename from media/webrtc/trunk/webrtc/video_engine/test/common/fake_encoder.h rename to media/webrtc/trunk/webrtc/test/fake_encoder.h diff --git a/media/webrtc/trunk/webrtc/test/fake_network_pipe.cc b/media/webrtc/trunk/webrtc/test/fake_network_pipe.cc new file mode 100644 index 000000000000..5a2424e75c5b --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/fake_network_pipe.cc @@ -0,0 +1,203 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/test/fake_network_pipe.h" + +#include +#include +#include +#include + +#include "webrtc/call.h" +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/system_wrappers/interface/tick_util.h" + +namespace webrtc { + +const double kPi = 3.14159265; +const int kDefaultProcessIntervalMs = 30; + +static int GaussianRandom(int mean_delay_ms, int standard_deviation_ms) { + // Creating a Normal distribution variable from two independent uniform + // variables based on the Box-Muller transform. + double uniform1 = (rand() + 1.0) / (RAND_MAX + 1.0); // NOLINT + double uniform2 = (rand() + 1.0) / (RAND_MAX + 1.0); // NOLINT + return static_cast(mean_delay_ms + standard_deviation_ms * + sqrt(-2 * log(uniform1)) * cos(2 * kPi * uniform2)); +} + +class NetworkPacket { + public: + NetworkPacket(const uint8_t* data, size_t length, int64_t send_time, + int64_t arrival_time) + : data_(NULL), + data_length_(length), + send_time_(send_time), + arrival_time_(arrival_time) { + data_ = new uint8_t[length]; + memcpy(data_, data, length); + } + ~NetworkPacket() { + delete [] data_; + } + + uint8_t* data() const { return data_; } + size_t data_length() const { return data_length_; } + int64_t send_time() const { return send_time_; } + int64_t arrival_time() const { return arrival_time_; } + void IncrementArrivalTime(int64_t extra_delay) { + arrival_time_+= extra_delay; + } + + private: + // The packet data. + uint8_t* data_; + // Length of data_. + size_t data_length_; + // The time the packet was sent out on the network. + const int64_t send_time_; + // The time the packet should arrive at the reciver. + int64_t arrival_time_; +}; + +FakeNetworkPipe::FakeNetworkPipe( + const FakeNetworkPipe::Config& config) + : lock_(CriticalSectionWrapper::CreateCriticalSection()), + packet_receiver_(NULL), + config_(config), + dropped_packets_(0), + sent_packets_(0), + total_packet_delay_(0), + next_process_time_(TickTime::MillisecondTimestamp()) { +} + +FakeNetworkPipe::~FakeNetworkPipe() { + while (!capacity_link_.empty()) { + delete capacity_link_.front(); + capacity_link_.pop(); + } + while (!delay_link_.empty()) { + delete delay_link_.front(); + delay_link_.pop(); + } +} + +void FakeNetworkPipe::SetReceiver(PacketReceiver* receiver) { + packet_receiver_ = receiver; +} + +void FakeNetworkPipe::SendPacket(const uint8_t* data, size_t data_length) { + // A NULL packet_receiver_ means that this pipe will terminate the flow of + // packets. + if (packet_receiver_ == NULL) + return; + CriticalSectionScoped crit(lock_.get()); + if (config_.queue_length > 0 && + capacity_link_.size() >= config_.queue_length) { + // Too many packet on the link, drop this one. + ++dropped_packets_; + return; + } + + int64_t time_now = TickTime::MillisecondTimestamp(); + + // Delay introduced by the link capacity. + int64_t capacity_delay_ms = 0; + if (config_.link_capacity_kbps > 0) + capacity_delay_ms = data_length / (config_.link_capacity_kbps / 8); + int64_t network_start_time = time_now; + + // Check if there already are packets on the link and change network start + // time if there is. 
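  // Reviewer aside, not in the patch: the capacity model above reduces to a
  // serialization delay of data_length / (link_capacity_kbps / 8) ms, queued
  // behind the previous packet's arrival. Worked example: a 1000-byte packet
  // on an 80 kbps link gives 80 / 8 = 10 bytes per ms, so capacity_delay_ms =
  // 1000 / 10 = 100 ms; and if the queue tail already arrives at t + 40 ms,
  // this packet is scheduled for t + 140 ms rather than t + 100 ms.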
+void FakeNetworkPipe::SendPacket(const uint8_t* data, size_t data_length) {
+  // A NULL packet_receiver_ means that this pipe will terminate the flow of
+  // packets.
+  if (packet_receiver_ == NULL)
+    return;
+  CriticalSectionScoped crit(lock_.get());
+  if (config_.queue_length > 0 &&
+      capacity_link_.size() >= config_.queue_length) {
+    // Too many packets on the link, drop this one.
+    ++dropped_packets_;
+    return;
+  }
+
+  int64_t time_now = TickTime::MillisecondTimestamp();
+
+  // Delay introduced by the link capacity.
+  int64_t capacity_delay_ms = 0;
+  if (config_.link_capacity_kbps > 0)
+    capacity_delay_ms = data_length / (config_.link_capacity_kbps / 8);
+  int64_t network_start_time = time_now;
+
+  // Check if there already are packets on the link and change network start
+  // time if there is.
+  if (capacity_link_.size() > 0)
+    network_start_time = capacity_link_.back()->arrival_time();
+
+  int64_t arrival_time = network_start_time + capacity_delay_ms;
+  NetworkPacket* packet = new NetworkPacket(data, data_length, time_now,
+                                            arrival_time);
+  capacity_link_.push(packet);
+}
+
+float FakeNetworkPipe::PercentageLoss() {
+  CriticalSectionScoped crit(lock_.get());
+  if (sent_packets_ == 0)
+    return 0;
+
+  return static_cast<float>(dropped_packets_) /
+      (sent_packets_ + dropped_packets_);
+}
+
+int FakeNetworkPipe::AverageDelay() {
+  CriticalSectionScoped crit(lock_.get());
+  if (sent_packets_ == 0)
+    return 0;
+
+  return total_packet_delay_ / static_cast<int>(sent_packets_);
+}
+
+void FakeNetworkPipe::Process() {
+  int64_t time_now = TickTime::MillisecondTimestamp();
+  std::queue<NetworkPacket*> packets_to_deliver;
+  {
+    CriticalSectionScoped crit(lock_.get());
+    // Check the capacity link first.
+    while (capacity_link_.size() > 0 &&
+           time_now >= capacity_link_.front()->arrival_time()) {
+      // Time to get this packet.
+      NetworkPacket* packet = capacity_link_.front();
+      capacity_link_.pop();
+
+      // Add extra delay and jitter, but make sure the arrival time is not
+      // earlier than the last packet in the queue.
+      int extra_delay = GaussianRandom(config_.queue_delay_ms,
+                                       config_.delay_standard_deviation_ms);
+      if (delay_link_.size() > 0 &&
+          packet->arrival_time() + extra_delay <
+          delay_link_.back()->arrival_time()) {
+        extra_delay = delay_link_.back()->arrival_time() -
+            packet->arrival_time();
+      }
+      packet->IncrementArrivalTime(extra_delay);
+      if (packet->arrival_time() < next_process_time_)
+        next_process_time_ = packet->arrival_time();
+      delay_link_.push(packet);
+    }
+
+    // Check the extra delay queue.
+    while (delay_link_.size() > 0 &&
+           time_now >= delay_link_.front()->arrival_time()) {
+      // Deliver this packet.
+      NetworkPacket* packet = delay_link_.front();
+      packets_to_deliver.push(packet);
+      delay_link_.pop();
+      // |time_now| might be later than when the packet should have arrived,
+      // due to NetworkProcess being called too late. For stats, use the time
+      // it should have been on the link.
+      total_packet_delay_ += packet->arrival_time() - packet->send_time();
+    }
+    sent_packets_ += packets_to_deliver.size();
+  }
+  while (!packets_to_deliver.empty()) {
+    NetworkPacket* packet = packets_to_deliver.front();
+    packets_to_deliver.pop();
+    packet_receiver_->DeliverPacket(packet->data(), packet->data_length());
+    delete packet;
+  }
+}
+
+int FakeNetworkPipe::TimeUntilNextProcess() const {
+  CriticalSectionScoped crit(lock_.get());
+  if (capacity_link_.size() == 0 || delay_link_.size() == 0)
+    return kDefaultProcessIntervalMs;
+  return std::max(static_cast<int>(next_process_time_ -
+      TickTime::MillisecondTimestamp()), 0);
+}
+
+}  // namespace webrtc
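GaussianRandom() above draws the pipe's jitter with the Box-Muller transform: two independent uniforms on (0, 1] become one normally distributed sample, and the + 1.0 offsets keep the first uniform strictly positive so log() stays finite. The same draw in isolation, assuming only the C standard library:

    #include <math.h>
    #include <stdlib.h>

    // Box-Muller: N(mean, stddev) from two uniform draws in (0, 1].
    int GaussianDelayMs(int mean_ms, int stddev_ms) {
      double uniform1 = (rand() + 1.0) / (RAND_MAX + 1.0);
      double uniform2 = (rand() + 1.0) / (RAND_MAX + 1.0);
      double normal = sqrt(-2.0 * log(uniform1)) *
                      cos(2.0 * 3.14159265 * uniform2);
      return static_cast<int>(mean_ms + stddev_ms * normal);
    }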
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/libvietest/include/fake_network_pipe.h b/media/webrtc/trunk/webrtc/test/fake_network_pipe.h
similarity index 60%
rename from media/webrtc/trunk/webrtc/video_engine/test/libvietest/include/fake_network_pipe.h
rename to media/webrtc/trunk/webrtc/test/fake_network_pipe.h
index ece1472d3285..e75045701d1b 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/libvietest/include/fake_network_pipe.h
+++ b/media/webrtc/trunk/webrtc/test/fake_network_pipe.h
@@ -8,12 +8,13 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#ifndef WEBRTC_VIDEO_ENGINE_TEST_LIBVIETEST_INCLUDE_FAKE_NETWORK_PIPE_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_LIBVIETEST_INCLUDE_FAKE_NETWORK_PIPE_H_
+#ifndef WEBRTC_TEST_FAKE_NETWORK_PIPE_H_
+#define WEBRTC_TEST_FAKE_NETWORK_PIPE_H_

 #include <queue>

 #include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/typedefs.h"

@@ -21,14 +22,7 @@ namespace webrtc {

 class CriticalSectionWrapper;
 class NetworkPacket;
-
-class PacketReceiver {
- public:
-  // Delivers a new packet to the receive side of the network pipe. The
-  // implementor of PacketReceiver now owns the memory.
-  virtual void IncomingPacket(uint8_t* packet, int length) = 0;
-  virtual ~PacketReceiver() {}
-};
+class PacketReceiver;

 // Class faking a network link. This is a simple and naive solution just faking
 // capacity and adding an extra transport delay in addition to the capacity
@@ -37,18 +31,15 @@ class PacketReceiver {
 // TODO(mflodman) Add random and bursty packet loss.
 class FakeNetworkPipe {
  public:
-  struct Configuration {
-    Configuration()
-        : packet_receiver(NULL),
-          queue_length(0),
+  struct Config {
+    Config()
+        : queue_length(0),
           queue_delay_ms(0),
           delay_standard_deviation_ms(0),
           link_capacity_kbps(0),
           loss_percent(0) {
     }
-    // Callback to deliver received packets.
-    PacketReceiver* packet_receiver;
-    // Queue lenght in number of packets.
+    // Queue length in number of packets.
     size_t queue_length;
     // Delay in addition to capacity induced delay.
     int queue_delay_ms;
@@ -60,44 +51,45 @@ class FakeNetworkPipe {
     int loss_percent;
   };

-  explicit FakeNetworkPipe(const FakeNetworkPipe::Configuration& configuration);
+  explicit FakeNetworkPipe(const FakeNetworkPipe::Config& config);
   ~FakeNetworkPipe();

+  // Must not be called in parallel with SendPacket or Process.
+  void SetReceiver(PacketReceiver* receiver);
+
   // Sends a new packet to the link.
-  void SendPacket(void* packet, int packet_length);
+  void SendPacket(const uint8_t* packet, size_t packet_length);

   // Processes the network queues and triggers PacketReceiver::IncomingPacket
   // for packets ready to be delivered.
-  void NetworkProcess();
+  void Process();
+  int TimeUntilNextProcess() const;

   // Get statistics.
   float PercentageLoss();
   int AverageDelay();
-  int dropped_packets() { return dropped_packets_; }
-  int sent_packets() { return sent_packets_; }
+  size_t dropped_packets() { return dropped_packets_; }
+  size_t sent_packets() { return sent_packets_; }

  private:
+  scoped_ptr<CriticalSectionWrapper> lock_;
   PacketReceiver* packet_receiver_;
-  scoped_ptr<CriticalSectionWrapper> link_cs_;
   std::queue<NetworkPacket*> capacity_link_;
   std::queue<NetworkPacket*> delay_link_;

   // Link configuration.
-  const size_t queue_length_;
-  const int queue_delay_ms_;
-  const int queue_delay_deviation_ms_;
-  const int link_capacity_bytes_ms_;  // In bytes per ms.
-
-  const int loss_percent_;
+  Config config_;

   // Statistics.
-  int dropped_packets_;
-  int sent_packets_;
+  size_t dropped_packets_;
+  size_t sent_packets_;
   int total_packet_delay_;

+  int64_t next_process_time_;
+
   DISALLOW_COPY_AND_ASSIGN(FakeNetworkPipe);
 };

 }  // namespace webrtc

-#endif  // WEBRTC_VIDEO_ENGINE_TEST_LIBVIETEST_INCLUDE_FAKE_NETWORK_PIPE_H_
+#endif  // WEBRTC_TEST_FAKE_NETWORK_PIPE_H_
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/libvietest/testbed/fake_network_pipe_unittest.cc b/media/webrtc/trunk/webrtc/test/fake_network_pipe_unittest.cc
similarity index 79%
rename from media/webrtc/trunk/webrtc/video_engine/test/libvietest/testbed/fake_network_pipe_unittest.cc
rename to media/webrtc/trunk/webrtc/test/fake_network_pipe_unittest.cc
index 7747302d0fe1..1245f618f165 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/libvietest/testbed/fake_network_pipe_unittest.cc
+++ b/media/webrtc/trunk/webrtc/test/fake_network_pipe_unittest.cc
@@ -11,9 +11,10 @@

 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/call.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/video_engine/test/libvietest/include/fake_network_pipe.h"
+#include "webrtc/test/fake_network_pipe.h"

 using ::testing::_;
 using ::testing::AnyNumber;
@@ -27,12 +28,12 @@ class MockReceiver : public PacketReceiver {
   MockReceiver() {}
   virtual ~MockReceiver() {}

-  void IncomingPacket(uint8_t* data, int length) {
-    IncomingData(data, length);
+  void IncomingPacket(const uint8_t* data, size_t length) {
+    DeliverPacket(data, length);
     delete [] data;
   }

-  MOCK_METHOD2(IncomingData, void(uint8_t*, int));
+  MOCK_METHOD2(DeliverPacket, bool(const uint8_t*, size_t));
 };

 class FakeNetworkPipeTest : public ::testing::Test {
@@ -63,11 +64,11 @@ void DeleteMemory(uint8_t* data, int length) { delete [] data; }

 // Test the capacity link and verify we get as many packets as we expect.
 TEST_F(FakeNetworkPipeTest, CapacityTest) {
-  FakeNetworkPipe::Configuration config;
-  config.packet_receiver = receiver_.get();
+  FakeNetworkPipe::Config config;
   config.queue_length = 20;
   config.link_capacity_kbps = 80;
   scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+  pipe->SetReceiver(receiver_.get());

   // Add 10 packets of 1000 bytes, = 80 kb, and verify it takes one second to
   // get through the pipe.
@@ -80,37 +81,37 @@ TEST_F(FakeNetworkPipeTest, CapacityTest) {
                kPacketSize);

   // Time hasn't increased yet, so we shouldn't get any packets.
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(0);
-  pipe->NetworkProcess();
+  pipe->Process();

   // Advance enough time to release one packet.
   TickTime::AdvanceFakeClock(kPacketTimeMs);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(1);
-  pipe->NetworkProcess();
+  pipe->Process();

   // Release all but one packet
   TickTime::AdvanceFakeClock(9 * kPacketTimeMs - 1);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(8);
-  pipe->NetworkProcess();
+  pipe->Process();

   // And the last one.
   TickTime::AdvanceFakeClock(1);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(1);
-  pipe->NetworkProcess();
+  pipe->Process();
 }
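The timings in CapacityTest follow from the capacity delay in FakeNetworkPipe::SendPacket(): a link of link_capacity_kbps forwards link_capacity_kbps / 8 bytes per millisecond, so a 1000-byte packet on an 80 kbps link occupies it for 100 ms and ten such packets need one second. The fixture's PacketTimeMs() helper lies outside the hunks shown; a sketch consistent with these tests would be:

    // Milliseconds one packet occupies the link:
    // bytes / (kbps / 8 bytes-per-ms), e.g. 1000 / 10 = 100 ms at 80 kbps.
    static int PacketTimeMs(int capacity_kbps, int packet_size) {
      return packet_size / (capacity_kbps / 8);
    }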
 // Test the extra network delay.
 TEST_F(FakeNetworkPipeTest, ExtraDelayTest) {
-  FakeNetworkPipe::Configuration config;
-  config.packet_receiver = receiver_.get();
+  FakeNetworkPipe::Config config;
   config.queue_length = 20;
   config.queue_delay_ms = 100;
   config.link_capacity_kbps = 80;
   scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+  pipe->SetReceiver(receiver_.get());

   const int kNumPackets = 2;
   const int kPacketSize = 1000;
@@ -122,31 +123,31 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) {

   // Increase more than kPacketTimeMs, but not more than the extra delay.
   TickTime::AdvanceFakeClock(kPacketTimeMs);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(0);
-  pipe->NetworkProcess();
+  pipe->Process();

   // Advance the network delay to get the first packet.
   TickTime::AdvanceFakeClock(config.queue_delay_ms);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(1);
-  pipe->NetworkProcess();
+  pipe->Process();

   // Advance one more kPacketTimeMs to get the last packet.
   TickTime::AdvanceFakeClock(kPacketTimeMs);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(1);
-  pipe->NetworkProcess();
+  pipe->Process();
 }

 // Test the number of buffers and packets are dropped when sending too many
 // packets too quickly.
 TEST_F(FakeNetworkPipeTest, QueueLengthTest) {
-  FakeNetworkPipe::Configuration config;
-  config.packet_receiver = receiver_.get();
+  FakeNetworkPipe::Config config;
   config.queue_length = 2;
   config.link_capacity_kbps = 80;
   scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+  pipe->SetReceiver(receiver_.get());

   const int kPacketSize = 1000;
   const int kPacketTimeMs = PacketTimeMs(config.link_capacity_kbps,
@@ -158,19 +159,19 @@ TEST_F(FakeNetworkPipeTest, QueueLengthTest) {

   // Increase time enough to deliver all three packets, verify only two are
   // delivered.
   TickTime::AdvanceFakeClock(3 * kPacketTimeMs);
-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(2);
-  pipe->NetworkProcess();
+  pipe->Process();
 }

 // Test we get statistics as expected.
 TEST_F(FakeNetworkPipeTest, StatisticsTest) {
-  FakeNetworkPipe::Configuration config;
-  config.packet_receiver = receiver_.get();
+  FakeNetworkPipe::Config config;
   config.queue_length = 2;
   config.queue_delay_ms = 20;
   config.link_capacity_kbps = 80;
   scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+  pipe->SetReceiver(receiver_.get());

   const int kPacketSize = 1000;
   const int kPacketTimeMs = PacketTimeMs(config.link_capacity_kbps,
@@ -180,15 +181,15 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) {
   SendPackets(pipe.get(), 3, kPacketSize);
   TickTime::AdvanceFakeClock(3 * kPacketTimeMs + config.queue_delay_ms);

-  EXPECT_CALL(*receiver_, IncomingData(_, _))
+  EXPECT_CALL(*receiver_, DeliverPacket(_, _))
      .Times(2);
-  pipe->NetworkProcess();
+  pipe->Process();

   // Packet 1: kPacketTimeMs + config.queue_delay_ms,
   // packet 2: 2 * kPacketTimeMs + config.queue_delay_ms => 170 ms average.
   EXPECT_EQ(pipe->AverageDelay(), 170);
-  EXPECT_EQ(pipe->sent_packets(), 2);
-  EXPECT_EQ(pipe->dropped_packets(), 1);
+  EXPECT_EQ(pipe->sent_packets(), 2u);
+  EXPECT_EQ(pipe->dropped_packets(), 1u);
   EXPECT_EQ(pipe->PercentageLoss(), 1/3.f);
 }
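The 170 ms expectation in StatisticsTest is just the mean delay of the two delivered packets, the second of which queues behind the first on the capacity link; AverageDelay() divides the summed delays by the sent count. Worked out under the test's configuration:

    #include <assert.h>

    int main() {
      const int kPacketTimeMs = 100;  // 1000 bytes at 80 kbps.
      const int kQueueDelayMs = 20;   // config.queue_delay_ms.
      int delay1 = 1 * kPacketTimeMs + kQueueDelayMs;  // 120 ms.
      int delay2 = 2 * kPacketTimeMs + kQueueDelayMs;  // 220 ms.
      assert((delay1 + delay2) / 2 == 170);
      return 0;
    }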
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/flags.cc b/media/webrtc/trunk/webrtc/test/flags.cc
similarity index 96%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/flags.cc
rename to media/webrtc/trunk/webrtc/test/flags.cc
index ec6e8f6e7759..088efff0b87c 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/common/flags.cc
+++ b/media/webrtc/trunk/webrtc/test/flags.cc
@@ -8,7 +8,7 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#include "webrtc/video_engine/test/common/flags.h"
+#include "webrtc/test/flags.h"

 #include "gflags/gflags.h"

diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/flags.h b/media/webrtc/trunk/webrtc/test/flags.h
similarity index 100%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/flags.h
rename to media/webrtc/trunk/webrtc/test/flags.h
diff --git a/media/webrtc/trunk/webrtc/common_video/test/frame_generator.cc b/media/webrtc/trunk/webrtc/test/frame_generator.cc
similarity index 54%
rename from media/webrtc/trunk/webrtc/common_video/test/frame_generator.cc
rename to media/webrtc/trunk/webrtc/test/frame_generator.cc
index 2f93d7ab61b8..4dd76aeea6de 100644
--- a/media/webrtc/trunk/webrtc/common_video/test/frame_generator.cc
+++ b/media/webrtc/trunk/webrtc/test/frame_generator.cc
@@ -7,9 +7,11 @@
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */
-#include "webrtc/common_video/test/frame_generator.h"
+#include "webrtc/test/frame_generator.h"

+#include <math.h>
 #include <stdio.h>
+#include <string.h>

 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

@@ -17,6 +19,37 @@ namespace webrtc {
 namespace test {
 namespace {

+class ChromaGenerator : public FrameGenerator {
+ public:
+  ChromaGenerator(size_t width, size_t height)
+      : angle_(0.0), width_(width), height_(height) {
+    assert(width > 0);
+    assert(height > 0);
+  }
+
+  virtual I420VideoFrame* NextFrame() OVERRIDE {
+    frame_.CreateEmptyFrame(static_cast<int>(width_),
+                            static_cast<int>(height_),
+                            static_cast<int>(width_),
+                            static_cast<int>((width_ + 1) / 2),
+                            static_cast<int>((width_ + 1) / 2));
+    angle_ += 30.0;
+    uint8_t u = fabs(sin(angle_)) * 0xFF;
+    uint8_t v = fabs(cos(angle_)) * 0xFF;
+
+    memset(frame_.buffer(kYPlane), 0x80, frame_.allocated_size(kYPlane));
+    memset(frame_.buffer(kUPlane), u, frame_.allocated_size(kUPlane));
+    memset(frame_.buffer(kVPlane), v, frame_.allocated_size(kVPlane));
+    return &frame_;
+  }
+
+ private:
+  double angle_;
+  size_t width_;
+  size_t height_;
+  I420VideoFrame frame_;
+};
+
 class YuvFileGenerator : public FrameGenerator {
  public:
   YuvFileGenerator(FILE* file, size_t width, size_t height)
@@ -27,11 +60,6 @@ class YuvFileGenerator : public FrameGenerator {
     frame_size_ = CalcBufferSize(
         kI420, static_cast<int>(width_), static_cast<int>(height_));
     frame_buffer_ = new uint8_t[frame_size_];
-    frame_.CreateEmptyFrame(static_cast<int>(width),
-                            static_cast<int>(height),
-                            static_cast<int>(width),
-                            static_cast<int>((width + 1) / 2),
-                            static_cast<int>((width + 1) / 2));
   }

   virtual ~YuvFileGenerator() {
@@ -39,13 +67,19 @@ class YuvFileGenerator : public FrameGenerator {
     delete[] frame_buffer_;
   }

-  virtual I420VideoFrame& NextFrame() OVERRIDE {
+  virtual I420VideoFrame* NextFrame() OVERRIDE {
     size_t count = fread(frame_buffer_, 1, frame_size_, file_);
     if (count < frame_size_) {
       rewind(file_);
       return NextFrame();
     }

+    frame_.CreateEmptyFrame(static_cast<int>(width_),
+                            static_cast<int>(height_),
+                            static_cast<int>(width_),
+                            static_cast<int>((width_ + 1) / 2),
+                            static_cast<int>((width_ + 1) / 2));
+
     ConvertToI420(kI420,
                   frame_buffer_,
                   0,
@@ -55,7 +89,7 @@ class YuvFileGenerator : public FrameGenerator {
                   0,
                   kRotateNone,
                   &frame_);
-    return frame_;
+    return &frame_;
   }

  private:
@@ -68,10 +102,14 @@ class YuvFileGenerator : public FrameGenerator {
 };
 }  // namespace

+FrameGenerator* FrameGenerator::Create(size_t width, size_t height) {
+  return new ChromaGenerator(width, height);
+}
+
 FrameGenerator* FrameGenerator::CreateFromYuvFile(const char* file,
                                                   size_t width,
                                                   size_t height) {
-  FILE* file_handle = fopen(file, "r");
+  FILE* file_handle = fopen(file, "rb");
   assert(file_handle);
   return new YuvFileGenerator(file_handle, width, height);
 }
diff --git a/media/webrtc/trunk/webrtc/common_video/test/frame_generator.h b/media/webrtc/trunk/webrtc/test/frame_generator.h
similarity index 91%
rename from media/webrtc/trunk/webrtc/common_video/test/frame_generator.h
rename to media/webrtc/trunk/webrtc/test/frame_generator.h
index 193ad480f4fd..fe10612fb5c3 100644
--- a/media/webrtc/trunk/webrtc/common_video/test/frame_generator.h
+++ b/media/webrtc/trunk/webrtc/test/frame_generator.h
@@ -22,8 +22,9 @@ class FrameGenerator {
   virtual ~FrameGenerator() {}

   // Returns video frame that remains valid until next call.
-  virtual I420VideoFrame& NextFrame() = 0;
+  virtual I420VideoFrame* NextFrame() = 0;

+  static FrameGenerator* Create(size_t width, size_t height);
   static FrameGenerator* CreateFromYuvFile(const char* file,
                                            size_t width,
                                            size_t height);
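ChromaGenerator in the new frame_generator.cc animates color by sweeping only the U and V planes while luma stays flat at 0x80; the (width_ + 1) / 2 arguments are the usual rounded-up half-resolution chroma planes of I420. A self-contained sketch of the per-frame plane values it writes:

    #include <math.h>
    #include <stdint.h>

    // One step of the chroma sweep: flat mid-gray luma, U/V derived from an
    // advancing angle, exactly the values memset into the three planes.
    void NextChroma(double* angle, uint8_t* y, uint8_t* u, uint8_t* v) {
      *angle += 30.0;
      *y = 0x80;
      *u = static_cast<uint8_t>(fabs(sin(*angle)) * 0xFF);
      *v = static_cast<uint8_t>(fabs(cos(*angle)) * 0xFF);
    }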
*/ -#include "webrtc/video_engine/test/common/frame_generator_capturer.h" +#include "webrtc/test/frame_generator_capturer.h" -#include -#include - -#include "webrtc/common_video/test/frame_generator.h" +#include "webrtc/test/frame_generator.h" #include "webrtc/system_wrappers/interface/clock.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/sleep.h" #include "webrtc/system_wrappers/interface/thread_wrapper.h" -#include "webrtc/video_engine/new_include/video_send_stream.h" +#include "webrtc/video_send_stream.h" namespace webrtc { namespace test { -namespace { -class ChromaGenerator : public FrameGenerator { - public: - ChromaGenerator(size_t width, size_t height, Clock* clock) : clock_(clock) { - assert(width > 0); - assert(height > 0); - frame_.CreateEmptyFrame(static_cast(width), - static_cast(height), - static_cast(width), - static_cast((width + 1) / 2), - static_cast((width + 1) / 2)); - memset(frame_.buffer(kYPlane), 0x80, frame_.allocated_size(kYPlane)); - } - - virtual I420VideoFrame& NextFrame() OVERRIDE { - double angle = - static_cast(clock_->CurrentNtpInMilliseconds()) / 1000.0; - uint8_t u = fabs(sin(angle)) * 0xFF; - uint8_t v = fabs(cos(angle)) * 0xFF; - - memset(frame_.buffer(kUPlane), u, frame_.allocated_size(kUPlane)); - memset(frame_.buffer(kVPlane), v, frame_.allocated_size(kVPlane)); - return frame_; - } - - private: - Clock* clock_; - I420VideoFrame frame_; -}; -} // namespace FrameGeneratorCapturer* FrameGeneratorCapturer::Create( VideoSendStreamInput* input, @@ -61,7 +28,7 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::Create( int target_fps, Clock* clock) { FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer( - clock, input, new ChromaGenerator(width, height, clock), target_fps); + clock, input, FrameGenerator::Create(width, height), target_fps); if (!capturer->Init()) { delete capturer; return NULL; @@ -99,7 +66,6 @@ FrameGeneratorCapturer::FrameGeneratorCapturer(Clock* clock, sending_(false), tick_(EventWrapper::Create()), lock_(CriticalSectionWrapper::CreateCriticalSection()), - thread_(NULL), frame_generator_(frame_generator), target_fps_(target_fps) { assert(input != NULL); @@ -115,6 +81,11 @@ FrameGeneratorCapturer::~FrameGeneratorCapturer() { } bool FrameGeneratorCapturer::Init() { + // This check is added because frame_generator_ might be file based and should + // not crash because a file moved. 
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/gl/gl_renderer.cc b/media/webrtc/trunk/webrtc/test/gl/gl_renderer.cc
similarity index 97%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/gl/gl_renderer.cc
rename to media/webrtc/trunk/webrtc/test/gl/gl_renderer.cc
index 5e229eb33175..f2c7acd6c006 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/common/gl/gl_renderer.cc
+++ b/media/webrtc/trunk/webrtc/test/gl/gl_renderer.cc
@@ -8,7 +8,7 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#include "webrtc/video_engine/test/common/gl/gl_renderer.h"
+#include "webrtc/test/gl/gl_renderer.h"

 #include

diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/gl/gl_renderer.h b/media/webrtc/trunk/webrtc/test/gl/gl_renderer.h
similarity index 95%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/gl/gl_renderer.h
rename to media/webrtc/trunk/webrtc/test/gl/gl_renderer.h
index 033f1cb1d828..c2110b2e74ad 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/common/gl/gl_renderer.h
+++ b/media/webrtc/trunk/webrtc/test/gl/gl_renderer.h
@@ -17,8 +17,8 @@
 #include
 #endif

+#include "webrtc/test/video_renderer.h"
 #include "webrtc/typedefs.h"
-#include "webrtc/video_engine/test/common/video_renderer.h"

 namespace webrtc {

diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/linux/glx_renderer.cc b/media/webrtc/trunk/webrtc/test/linux/glx_renderer.cc
similarity index 98%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/linux/glx_renderer.cc
rename to media/webrtc/trunk/webrtc/test/linux/glx_renderer.cc
index 312cb119a697..c0b6130c25e5 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/common/linux/glx_renderer.cc
+++ b/media/webrtc/trunk/webrtc/test/linux/glx_renderer.cc
@@ -8,7 +8,7 @@
  * be found in the AUTHORS file in the root of the source tree.
*/ -#include "webrtc/video_engine/test/common/linux/glx_renderer.h" +#include "webrtc/test/linux/glx_renderer.h" #include diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/linux/glx_renderer.h b/media/webrtc/trunk/webrtc/test/linux/glx_renderer.h similarity index 95% rename from media/webrtc/trunk/webrtc/video_engine/test/common/linux/glx_renderer.h rename to media/webrtc/trunk/webrtc/test/linux/glx_renderer.h index 5a2f562a6121..152b91530f8c 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/linux/glx_renderer.h +++ b/media/webrtc/trunk/webrtc/test/linux/glx_renderer.h @@ -14,8 +14,8 @@ #include #include +#include "webrtc/test/gl/gl_renderer.h" #include "webrtc/typedefs.h" -#include "webrtc/video_engine/test/common/gl/gl_renderer.h" namespace webrtc { namespace test { diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/linux/video_renderer_linux.cc b/media/webrtc/trunk/webrtc/test/linux/video_renderer_linux.cc similarity index 87% rename from media/webrtc/trunk/webrtc/video_engine/test/common/linux/video_renderer_linux.cc rename to media/webrtc/trunk/webrtc/test/linux/video_renderer_linux.cc index 2036a6ca7ad0..6f69dd749804 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/linux/video_renderer_linux.cc +++ b/media/webrtc/trunk/webrtc/test/linux/video_renderer_linux.cc @@ -7,9 +7,9 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/video_engine/test/common/video_renderer.h" +#include "webrtc/test/video_renderer.h" -#include "webrtc/video_engine/test/common/linux/glx_renderer.h" +#include "webrtc/test/linux/glx_renderer.h" namespace webrtc { namespace test { diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/mac/run_tests.mm b/media/webrtc/trunk/webrtc/test/mac/run_tests.mm similarity index 100% rename from media/webrtc/trunk/webrtc/video_engine/test/common/mac/run_tests.mm rename to media/webrtc/trunk/webrtc/test/mac/run_tests.mm diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/mac/video_renderer_mac.h b/media/webrtc/trunk/webrtc/test/mac/video_renderer_mac.h similarity index 94% rename from media/webrtc/trunk/webrtc/video_engine/test/common/mac/video_renderer_mac.h rename to media/webrtc/trunk/webrtc/test/mac/video_renderer_mac.h index f5e271da2344..7701840c031f 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/mac/video_renderer_mac.h +++ b/media/webrtc/trunk/webrtc/test/mac/video_renderer_mac.h @@ -11,8 +11,8 @@ #ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_MAC_VIDEO_RENDERER_MAC_H_ #define WEBRTC_VIDEO_ENGINE_TEST_COMMON_MAC_VIDEO_RENDERER_MAC_H_ +#include "webrtc/test/gl/gl_renderer.h" #include "webrtc/system_wrappers/interface/constructor_magic.h" -#include "webrtc/video_engine/test/common/gl/gl_renderer.h" @class CocoaWindow; diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/mac/video_renderer_mac.mm b/media/webrtc/trunk/webrtc/test/mac/video_renderer_mac.mm similarity index 98% rename from media/webrtc/trunk/webrtc/video_engine/test/common/mac/video_renderer_mac.mm rename to media/webrtc/trunk/webrtc/test/mac/video_renderer_mac.mm index ebbe02cc17a6..1bc05a190e73 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/mac/video_renderer_mac.mm +++ b/media/webrtc/trunk/webrtc/test/mac/video_renderer_mac.mm @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "webrtc/video_engine/test/common/mac/video_renderer_mac.h" +#include "webrtc/test/mac/video_renderer_mac.h" #import diff --git a/media/webrtc/trunk/webrtc/test/metrics.gyp b/media/webrtc/trunk/webrtc/test/metrics.gyp index 3660becfdb5d..0486a7c9ea8e 100644 --- a/media/webrtc/trunk/webrtc/test/metrics.gyp +++ b/media/webrtc/trunk/webrtc/test/metrics.gyp @@ -23,67 +23,10 @@ '<(webrtc_root)/common_video/common_video.gyp:common_video', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], - 'include_dirs': [ - '.', - ], 'sources': [ 'testsupport/metrics/video_metrics.h', 'testsupport/metrics/video_metrics.cc', ], }, - { - 'target_name': 'metrics_unittests', - 'type': '<(gtest_target_type)', - 'dependencies': [ - 'metrics', - '<(webrtc_root)/test/test.gyp:test_support_main', - '<(DEPTH)/testing/gtest.gyp:gtest', - ], - 'sources': [ - 'testsupport/metrics/video_metrics_unittest.cc', - ], - 'conditions': [ - # TODO(henrike): remove build_with_chromium==1 when the bots are - # using Chromium's buildbots. - ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { - 'dependencies': [ - '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code', - ], - }], - ], - }, ], # targets - 'conditions': [ - # TODO(henrike): remove build_with_chromium==1 when the bots are using - # Chromium's buildbots. - ['include_tests==1 and build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { - 'targets': [ - { - 'target_name': 'metrics_unittests_apk_target', - 'type': 'none', - 'dependencies': [ - '<(apk_tests_path):metrics_unittests_apk', - ], - }, - ], - }], - ['test_isolation_mode != "noop"', { - 'targets': [ - { - 'target_name': 'metrics_unittests_run', - 'type': 'none', - 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', - 'metrics_unittests', - ], - 'includes': [ - 'metrics_unittests.isolate', - ], - 'sources': [ - 'metrics_unittests.isolate', - ], - }, - ], - }], - ], } diff --git a/media/webrtc/trunk/webrtc/test/mock_transport.h b/media/webrtc/trunk/webrtc/test/mock_transport.h new file mode 100644 index 000000000000..388ee92e9d43 --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/mock_transport.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_TEST_MOCK_TRANSPORT_H_ +#define WEBRTC_TEST_MOCK_TRANSPORT_H_ + +#include "testing/gmock/include/gmock/gmock.h" +#include "webrtc/transport.h" + +namespace webrtc { + +class MockTransport : public webrtc::Transport { + public: + MOCK_METHOD3(SendPacket, + int(int channel, const void* data, int len)); + MOCK_METHOD3(SendRTCPPacket, + int(int channel, const void* data, int len)); +}; +} // namespace webrtc +#endif // WEBRTC_TEST_MOCK_TRANSPORT_H_ diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/null_platform_renderer.cc b/media/webrtc/trunk/webrtc/test/null_platform_renderer.cc similarity index 92% rename from media/webrtc/trunk/webrtc/video_engine/test/common/null_platform_renderer.cc rename to media/webrtc/trunk/webrtc/test/null_platform_renderer.cc index a79ee1d793be..362f7db762dd 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/null_platform_renderer.cc +++ b/media/webrtc/trunk/webrtc/test/null_platform_renderer.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/video_engine/test/common/video_renderer.h" +#include "webrtc/test/video_renderer.h" namespace webrtc { namespace test { diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/null_transport.cc b/media/webrtc/trunk/webrtc/test/null_transport.cc similarity index 74% rename from media/webrtc/trunk/webrtc/video_engine/test/common/null_transport.cc rename to media/webrtc/trunk/webrtc/test/null_transport.cc index 1ce8e009a4da..3cba6386cd53 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/null_transport.cc +++ b/media/webrtc/trunk/webrtc/test/null_transport.cc @@ -7,16 +7,16 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "webrtc/video_engine/test/common/null_transport.h" +#include "webrtc/test/null_transport.h" namespace webrtc { namespace test { -bool NullTransport::SendRTP(const uint8_t* packet, size_t length) { +bool NullTransport::SendRtp(const uint8_t* packet, size_t length) { return true; } -bool NullTransport::SendRTCP(const uint8_t* packet, size_t length) { +bool NullTransport::SendRtcp(const uint8_t* packet, size_t length) { return true; } diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/null_transport.h b/media/webrtc/trunk/webrtc/test/null_transport.h similarity index 80% rename from media/webrtc/trunk/webrtc/video_engine/test/common/null_transport.h rename to media/webrtc/trunk/webrtc/test/null_transport.h index b63ad4077d33..e8d4d10275ef 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/null_transport.h +++ b/media/webrtc/trunk/webrtc/test/null_transport.h @@ -10,7 +10,7 @@ #ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_NULL_TRANSPORT_H_ #define WEBRTC_VIDEO_ENGINE_TEST_COMMON_NULL_TRANSPORT_H_ -#include "webrtc/video_engine/new_include/transport.h" +#include "webrtc/transport.h" namespace webrtc { @@ -19,8 +19,8 @@ class PacketReceiver; namespace test { class NullTransport : public newapi::Transport { public: - virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE; - virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE; + virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE; + virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE; }; } // namespace test } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/rtp_rtcp_observer.h b/media/webrtc/trunk/webrtc/test/rtp_rtcp_observer.h similarity index 64% rename from media/webrtc/trunk/webrtc/video_engine/test/common/rtp_rtcp_observer.h rename to media/webrtc/trunk/webrtc/test/rtp_rtcp_observer.h index 2ceddf35f887..5ed9a3f3eb15 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/rtp_rtcp_observer.h +++ b/media/webrtc/trunk/webrtc/test/rtp_rtcp_observer.h @@ -13,14 +13,18 @@ #include #include +#include "testing/gtest/include/gtest/gtest.h" + +#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" #include "webrtc/typedefs.h" -#include "webrtc/video_engine/new_include/video_send_stream.h" +#include "webrtc/video_send_stream.h" namespace webrtc { namespace test { class RtpRtcpObserver { public: + virtual ~RtpRtcpObserver() {} newapi::Transport* SendTransport() { return &send_transport_; } @@ -40,20 +44,44 @@ class RtpRtcpObserver { receive_transport_.StopSending(); } - EventTypeWrapper Wait() { return observation_complete_->Wait(timeout_ms_); } + virtual EventTypeWrapper Wait() { + EventTypeWrapper result = observation_complete_->Wait(timeout_ms_); + observation_complete_->Reset(); + return result; + } protected: - RtpRtcpObserver(unsigned int event_timeout_ms) + RtpRtcpObserver(unsigned int event_timeout_ms, + const FakeNetworkPipe::Config& configuration) : lock_(CriticalSectionWrapper::CreateCriticalSection()), observation_complete_(EventWrapper::Create()), + parser_(RtpHeaderParser::Create()), send_transport_(lock_.get(), this, &RtpRtcpObserver::OnSendRtp, - &RtpRtcpObserver::OnSendRtcp), + &RtpRtcpObserver::OnSendRtcp, + configuration), receive_transport_(lock_.get(), this, &RtpRtcpObserver::OnReceiveRtp, - &RtpRtcpObserver::OnReceiveRtcp), + &RtpRtcpObserver::OnReceiveRtcp, + configuration), + timeout_ms_(event_timeout_ms) {} + + explicit RtpRtcpObserver(unsigned int event_timeout_ms) + : 
        lock_(CriticalSectionWrapper::CreateCriticalSection()),
+        observation_complete_(EventWrapper::Create()),
+        parser_(RtpHeaderParser::Create()),
+        send_transport_(lock_.get(),
+                        this,
+                        &RtpRtcpObserver::OnSendRtp,
+                        &RtpRtcpObserver::OnSendRtcp,
+                        FakeNetworkPipe::Config()),
+        receive_transport_(lock_.get(),
+                           this,
+                           &RtpRtcpObserver::OnReceiveRtp,
+                           &RtpRtcpObserver::OnReceiveRtcp,
+                           FakeNetworkPipe::Config()),
         timeout_ms_(event_timeout_ms) {}

   enum Action {
@@ -83,17 +111,21 @@ class RtpRtcpObserver {
    public:
     typedef Action (RtpRtcpObserver::*PacketTransportAction)(const uint8_t*,
                                                              size_t);
+
     PacketTransport(CriticalSectionWrapper* lock,
                     RtpRtcpObserver* observer,
                     PacketTransportAction on_rtp,
-                    PacketTransportAction on_rtcp)
-        : lock_(lock),
+                    PacketTransportAction on_rtcp,
+                    const FakeNetworkPipe::Config& configuration)
+        : test::DirectTransport(configuration),
+          lock_(lock),
           observer_(observer),
           on_rtp_(on_rtp),
           on_rtcp_(on_rtcp) {}

    private:
-    virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+    virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+      EXPECT_FALSE(RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)));
       Action action;
       {
         CriticalSectionScoped crit_(lock_);
@@ -104,12 +136,13 @@ class RtpRtcpObserver {
           // Drop packet silently.
           return true;
         case SEND_PACKET:
-          return test::DirectTransport::SendRTP(packet, length);
+          return test::DirectTransport::SendRtp(packet, length);
       }
       return true;  // Will never happen, makes compiler happy.
     }

-    virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
+    virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+      EXPECT_TRUE(RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)));
       Action action;
       {
         CriticalSectionScoped crit_(lock_);
@@ -120,7 +153,7 @@ class RtpRtcpObserver {
           // Drop packet silently.
           return true;
         case SEND_PACKET:
-          return test::DirectTransport::SendRTCP(packet, length);
+          return test::DirectTransport::SendRtcp(packet, length);
       }
       return true;  // Will never happen, makes compiler happy.
     }
@@ -135,6 +168,7 @@ class RtpRtcpObserver {
  protected:
   scoped_ptr<CriticalSectionWrapper> lock_;
   scoped_ptr<EventWrapper> observation_complete_;
+  scoped_ptr<RtpHeaderParser> parser_;

  private:
   PacketTransport send_transport_, receive_transport_;
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/run_loop.h b/media/webrtc/trunk/webrtc/test/run_loop.h
similarity index 100%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/run_loop.h
rename to media/webrtc/trunk/webrtc/test/run_loop.h
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/run_tests.h b/media/webrtc/trunk/webrtc/test/run_tests.h
similarity index 100%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/run_tests.h
rename to media/webrtc/trunk/webrtc/test/run_tests.h
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/statistics.cc b/media/webrtc/trunk/webrtc/test/statistics.cc
similarity index 94%
rename from media/webrtc/trunk/webrtc/video_engine/test/common/statistics.cc
rename to media/webrtc/trunk/webrtc/test/statistics.cc
index f6e339d9669d..0075d4c9a363 100644
--- a/media/webrtc/trunk/webrtc/video_engine/test/common/statistics.cc
+++ b/media/webrtc/trunk/webrtc/test/statistics.cc
@@ -7,7 +7,7 @@
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
*/ -#include "webrtc/video_engine/test/common/statistics.h" +#include "webrtc/test/statistics.h" #include diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/statistics.h b/media/webrtc/trunk/webrtc/test/statistics.h similarity index 100% rename from media/webrtc/trunk/webrtc/video_engine/test/common/statistics.h rename to media/webrtc/trunk/webrtc/test/statistics.h diff --git a/media/webrtc/trunk/webrtc/test/test.gyp b/media/webrtc/trunk/webrtc/test/test.gyp index 4e01b25d765d..0051bcee95c7 100644 --- a/media/webrtc/trunk/webrtc/test/test.gyp +++ b/media/webrtc/trunk/webrtc/test/test.gyp @@ -41,6 +41,17 @@ 'channel_transport/udp_transport_impl.h', ], }, + { + 'target_name': 'frame_generator', + 'type': 'static_library', + 'sources': [ + 'frame_generator.cc', + 'frame_generator.h', + ], + 'dependencies': [ + '<(webrtc_root)/common_video/common_video.gyp:common_video', + ], + }, { 'target_name': 'test_support', 'type': 'static_library', @@ -82,21 +93,6 @@ 'sources!': [ 'testsupport/android/root_path_android.cc', ], - # WebRTC tests use resource files for testing. These files are not - # hosted in WebRTC. The script ensures that the needed resources - # are downloaded. In stand alone WebRTC the script is called by - # the DEPS file. In Chromium, i.e. here, the files are pulled down - # only if tests requiring the resources are being built. - 'actions': [ - { - 'action_name': 'get_resources', - 'inputs': ['<(webrtc_root)/tools/update_resources.py'], - 'outputs': ['../../../resources'], - 'action': ['python', - '<(webrtc_root)/tools/update_resources.py', - '-p', - '../../../'], - }], }, { 'sources!': [ 'testsupport/android/root_path_android_chromium.cc', @@ -167,28 +163,32 @@ }], ], }, - { - 'target_name': 'buildbot_tests_scripts', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)', - 'files': [ - 'buildbot_tests.py', - '<(DEPTH)/tools/e2e_quality/audio/run_audio_test.py', - ], - }, - { - 'destination': '<(PRODUCT_DIR)/perf', - 'files': [ - '<(DEPTH)/tools/perf/__init__.py', - '<(DEPTH)/tools/perf/perf_utils.py', - ], - }, - ], - }, # target buildbot_tests_scripts ], 'conditions': [ + ['build_with_chromium==0', { + 'targets': [ + { + 'target_name': 'buildbot_tests_scripts', + 'type': 'none', + 'copies': [ + { + 'destination': '<(PRODUCT_DIR)', + 'files': [ + 'buildbot_tests.py', + '<(webrtc_root)/tools/e2e_quality/audio/run_audio_test.py', + ], + }, + { + 'destination': '<(PRODUCT_DIR)/perf', + 'files': [ + '<(DEPTH)/tools/perf/__init__.py', + '<(DEPTH)/tools/perf/perf_utils.py', + ], + }, + ], + }, # target buildbot_tests_scripts + ], + }], # TODO(henrike): remove build_with_chromium==1 when the bots are using # Chromium's buildbots. 
['include_tests==1 and build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', { @@ -208,10 +208,10 @@ 'target_name': 'test_support_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'test_support_unittests', ], 'includes': [ + '../build/isolate.gypi', 'test_support_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/video_engine/test/test_main.cc b/media/webrtc/trunk/webrtc/test/test_main.cc similarity index 87% rename from media/webrtc/trunk/webrtc/video_engine/test/test_main.cc rename to media/webrtc/trunk/webrtc/test/test_main.cc index 68bdd4a3a666..b57b032daf99 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/test_main.cc +++ b/media/webrtc/trunk/webrtc/test/test_main.cc @@ -10,9 +10,9 @@ #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/test/flags.h" +#include "webrtc/test/run_tests.h" #include "webrtc/test/testsupport/fileutils.h" -#include "webrtc/video_engine/test/common/flags.h" -#include "webrtc/video_engine/test/common/run_tests.h" int main(int argc, char* argv[]) { ::testing::InitGoogleTest(&argc, argv); diff --git a/media/webrtc/trunk/webrtc/test/test_suite.cc b/media/webrtc/trunk/webrtc/test/test_suite.cc index c8ff742c64fb..7cfb856f3f75 100644 --- a/media/webrtc/trunk/webrtc/test/test_suite.cc +++ b/media/webrtc/trunk/webrtc/test/test_suite.cc @@ -21,8 +21,7 @@ DEFINE_bool(logs, false, "print logs to stderr"); namespace webrtc { namespace test { -TestSuite::TestSuite(int argc, char** argv) - : trace_to_stderr_(NULL) { +TestSuite::TestSuite(int argc, char** argv) { SetExecutablePath(argv[0]); testing::InitGoogleMock(&argc, argv); // Runs InitGoogleTest() internally. // AllowCommandLineParsing allows us to ignore flags passed on to us by diff --git a/media/webrtc/trunk/webrtc/test/test_support_unittests.isolate b/media/webrtc/trunk/webrtc/test/test_support_unittests.isolate index 3384ab8c7a71..0f5de6507944 100644 --- a/media/webrtc/trunk/webrtc/test/test_support_unittests.isolate +++ b/media/webrtc/trunk/webrtc/test/test_support_unittests.isolate @@ -21,17 +21,16 @@ 'variables': { 'command': [ '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/test_support_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../DEPS', '../../testing/test_env.py', - '../../tools/swarm_client/run_isolated.py', - '../../tools/swarm_client/googletest/run_test_cases.py', - '../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/test_support_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/vcm_capturer.cc b/media/webrtc/trunk/webrtc/test/vcm_capturer.cc similarity index 85% rename from media/webrtc/trunk/webrtc/video_engine/test/common/vcm_capturer.cc rename to media/webrtc/trunk/webrtc/test/vcm_capturer.cc index dc92b89f17c8..a5820bfe1195 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/vcm_capturer.cc +++ b/media/webrtc/trunk/webrtc/test/vcm_capturer.cc @@ -8,16 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "webrtc/video_engine/test/common/vcm_capturer.h" +#include "webrtc/test/vcm_capturer.h" #include "webrtc/modules/video_capture/include/video_capture_factory.h" -#include "webrtc/video_engine/new_include/video_send_stream.h" +#include "webrtc/video_send_stream.h" namespace webrtc { namespace test { VcmCapturer::VcmCapturer(webrtc::VideoSendStreamInput* input) - : VideoCapturer(input), started_(false), vcm_(NULL), last_timestamp_(0) {} + : VideoCapturer(input), started_(false), vcm_(NULL) {} bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) { VideoCaptureModule::DeviceInfo* device_info = @@ -88,14 +88,8 @@ VcmCapturer::~VcmCapturer() { Destroy(); } void VcmCapturer::OnIncomingCapturedFrame(const int32_t id, I420VideoFrame& frame) { - if (last_timestamp_ == 0 || frame.timestamp() < last_timestamp_) { - last_timestamp_ = frame.timestamp(); - } - - if (started_) { - input_->PutFrame(frame, frame.timestamp() - last_timestamp_); - } - last_timestamp_ = frame.timestamp(); + if (started_) + input_->SwapFrame(&frame); } void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) { diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/vcm_capturer.h b/media/webrtc/trunk/webrtc/test/vcm_capturer.h similarity index 94% rename from media/webrtc/trunk/webrtc/video_engine/test/common/vcm_capturer.h rename to media/webrtc/trunk/webrtc/test/vcm_capturer.h index 5b1df9ce94c4..dde3edc2f7cc 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/vcm_capturer.h +++ b/media/webrtc/trunk/webrtc/test/vcm_capturer.h @@ -13,7 +13,7 @@ #include "webrtc/common_types.h" #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" #include "webrtc/modules/video_capture/include/video_capture.h" -#include "webrtc/video_engine/test/common/video_capturer.h" +#include "webrtc/test/video_capturer.h" namespace webrtc { namespace test { @@ -40,8 +40,6 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback { bool started_; VideoCaptureModule* vcm_; VideoCaptureCapability capability_; - - uint32_t last_timestamp_; }; } // test } // webrtc diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/video_capturer.cc b/media/webrtc/trunk/webrtc/test/video_capturer.cc similarity index 89% rename from media/webrtc/trunk/webrtc/video_engine/test/common/video_capturer.cc rename to media/webrtc/trunk/webrtc/test/video_capturer.cc index 9a1bd0c60643..fc37648bc58a 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/video_capturer.cc +++ b/media/webrtc/trunk/webrtc/test/video_capturer.cc @@ -8,11 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "webrtc/video_engine/test/common/video_capturer.h" +#include "webrtc/test/video_capturer.h" #include "webrtc/test/testsupport/fileutils.h" -#include "webrtc/video_engine/test/common/frame_generator_capturer.h" -#include "webrtc/video_engine/test/common/vcm_capturer.h" +#include "webrtc/test/frame_generator_capturer.h" +#include "webrtc/test/vcm_capturer.h" namespace webrtc { namespace test { diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/video_capturer.h b/media/webrtc/trunk/webrtc/test/video_capturer.h similarity index 100% rename from media/webrtc/trunk/webrtc/video_engine/test/common/video_capturer.h rename to media/webrtc/trunk/webrtc/test/video_capturer.h diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/video_renderer.cc b/media/webrtc/trunk/webrtc/test/video_renderer.cc similarity index 94% rename from media/webrtc/trunk/webrtc/video_engine/test/common/video_renderer.cc rename to media/webrtc/trunk/webrtc/test/video_renderer.cc index 84aab894c268..1a0c6133ce47 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/video_renderer.cc +++ b/media/webrtc/trunk/webrtc/test/video_renderer.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/video_engine/test/common/video_renderer.h" +#include "webrtc/test/video_renderer.h" // TODO(pbos): Android renderer diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/video_renderer.h b/media/webrtc/trunk/webrtc/test/video_renderer.h similarity index 96% rename from media/webrtc/trunk/webrtc/video_engine/test/common/video_renderer.h rename to media/webrtc/trunk/webrtc/test/video_renderer.h index a9baf4453608..c8623270a73c 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/video_renderer.h +++ b/media/webrtc/trunk/webrtc/test/video_renderer.h @@ -12,7 +12,7 @@ #include -#include "webrtc/video_engine/new_include/video_renderer.h" +#include "webrtc/video_renderer.h" namespace webrtc { namespace test { diff --git a/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.html b/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.html index 3b58301ca2c4..8a95032e26f7 100644 --- a/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.html +++ b/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.html @@ -1,7 +1,16 @@ + - GetUserMedia Browser Conformance Test + getUserMedia Browser Conformance Test @@ -18,328 +27,37 @@ Notice that this requires the site you're browsing to use HTTPS.
 This page contains a foundation of conformance tests that can be expanded
 to cover most things in the W3C specification of the Media Capture and
 Streams API.

-VERSION: These tests are based on the W3C Editor's Draft of 25 September
-2012.
+VERSION: These tests are based on the W3C Editor's Draft of August 24th,
+2013
+(http://dev.w3.org/2011/webrtc/editor/archives/20130824/getusermedia.html)

 STATUS: In its current state, it only performs simple checks on the various
 attributes and methods of the objects exposed by the API. There's not much
-functionality tested so far.
-
-PREREQUISITES: You must have a webcam available on the machine that the
-test is executed on.
-
-PREFIX: These tests currently utilize the webkit prefix, so that will have
-to be changed in order to test conformance with the actual standard!
-
-SPEC: http://dev.w3.org/2011/webrtc/editor/getusermedia.html
+functionality tested so far. The spec doesn't define if an attribute shall
+be owned by the object itself (assert_own_property) or if it shall be
+inherited (assert_inherits). Since testharness.js doesn't offer any
+generic function that covers both, the method for verification is
+currently chosen according to the current Chrome implementation.
+
+PREFIXES: These tests currently utilize the adapter.js script, which
+handles the prefixes used by different browsers.
+
+HOW TO RUN: The easiest way is to tell your browser to:
+  * Provide a fake webcam (--use-fake-device-for-media-stream in Chrome)
+  * Automatically allow access to the webcam
+    (--use-fake-ui-for-media-stream in Chrome)
+  * Allow loading HTML files from disk
+    (--allow-file-access-from-files in Chrome)
+Then just load this HTML file to execute the tests.
diff --git a/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.js b/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.js
new file mode 100644
index 000000000000..6be293207802
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/test/w3c/getusermedia_conformance_test.js
@@ -0,0 +1,451 @@
+// Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+
+setup({timeout:10000});
+
+// Helper functions to minimize code duplication.
+function failedCallback(test) {
+  return test.step_func(function (error) {
+    assert_unreached('Should not get an error callback');
+  });
+}
+function invokeGetUserMedia(test, okCallback) {
+  getUserMedia({ video: true, audio: true }, okCallback,
+      failedCallback(test));
+}
+
+// 4.2 MediaStream.
+var mediaStreamTest = async_test('4.2 MediaStream');
+
+function verifyMediaStream(stream) {
+  // TODO(kjellander): Add checks for default values where applicable.
+  test(function () {
+    assert_own_property(stream, 'id');
+    assert_true(typeof stream.id === 'string');
+    assert_readonly(stream, 'id');
+  }, '[MediaStream] id attribute');
+
+  test(function () {
+    assert_inherits(stream, 'getAudioTracks');
+    assert_true(typeof stream.getAudioTracks === 'function');
+  }, '[MediaStream] getAudioTracks function');
+
+  test(function () {
+    assert_inherits(stream, 'getVideoTracks');
+    assert_true(typeof stream.getVideoTracks === 'function');
+  }, '[MediaStream] getVideoTracks function');
+
+  test(function () {
+    assert_inherits(stream, 'getTrackById');
+    assert_true(typeof stream.getTrackById === 'function');
+  }, '[MediaStream] getTrackById function');
+
+  test(function () {
+    assert_inherits(stream, 'addTrack');
+    assert_true(typeof stream.addTrack === 'function');
+  }, '[MediaStream] addTrack function');
+
+  test(function () {
+    assert_inherits(stream, 'removeTrack');
+    assert_true(typeof stream.removeTrack === 'function');
+  }, '[MediaStream] removeTrack function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(stream, 'clone');
+    assert_true(typeof stream.clone === 'function');
+  }, '[MediaStream] clone function');
+
+  test(function () {
+    assert_own_property(stream, 'ended');
+    assert_true(typeof stream.ended === 'boolean');
+    assert_readonly(stream, 'ended');
+  }, '[MediaStream] ended attribute');
+
+  test(function () {
+    assert_own_property(stream, 'onended');
+    assert_true(stream.onended === null);
+  }, '[MediaStream] onended EventHandler');
+
+  test(function () {
+    assert_own_property(stream, 'onaddtrack');
+    assert_true(stream.onaddtrack === null);
+  }, '[MediaStream] onaddtrack EventHandler');
+
+  test(function () {
+    assert_own_property(stream, 'onremovetrack');
+    assert_true(stream.onremovetrack === null);
+  }, '[MediaStream] onremovetrack EventHandler');
+}
+
+mediaStreamTest.step(function() {
+  var okCallback = mediaStreamTest.step_func(function (stream) {
+    verifyMediaStream(stream);
+
+    var videoTracks = stream.getVideoTracks();
+    assert_true(videoTracks.length > 0);
+
+    // Verify event handlers are working.
+    stream.onaddtrack = onAddTrackCallback;
+    stream.onremovetrack = onRemoveTrackCallback;
+    stream.removeTrack(videoTracks[0]);
+    stream.addTrack(videoTracks[0]);
+    mediaStreamTest.done();
+  });
+  var onAddTrackCallback = mediaStreamTest.step_func(function () {
+    // TODO(kjellander): verify number of tracks.
+    mediaStreamTest.done();
+  });
+  var onRemoveTrackCallback = mediaStreamTest.step_func(function () {
+    // TODO(kjellander): verify number of tracks.
+    mediaStreamTest.done();
+  });
+  invokeGetUserMedia(mediaStreamTest, okCallback);
+});
+
+// 4.3 MediaStreamTrack.
+var mediaStreamTrackTest = async_test('4.3 MediaStreamTrack');
+
+function verifyTrack(type, track) {
+  test(function () {
+    assert_own_property(track, 'kind');
+    assert_readonly(track, 'kind');
+    assert_true(typeof track.kind === 'string',
+        'kind is an object (DOMString)');
+  }, '[MediaStreamTrack (' + type + ')] kind attribute');
+
+  test(function () {
+    assert_own_property(track, 'id');
+    assert_readonly(track, 'id');
+    assert_true(typeof track.id === 'string',
+        'id is an object (DOMString)');
+  }, '[MediaStreamTrack (' + type + ')] id attribute');
+
+  test(function () {
+    assert_own_property(track, 'label');
+    assert_readonly(track, 'label');
+    assert_true(typeof track.label === 'string',
+        'label is an object (DOMString)');
+  }, '[MediaStreamTrack (' + type + ')] label attribute');
+
+  test(function () {
+    assert_own_property(track, 'enabled');
+    assert_true(typeof track.enabled === 'boolean');
+    assert_true(track.enabled, 'enabled property must be true initially');
+  }, '[MediaStreamTrack (' + type + ')] enabled attribute');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_own_property(track, 'muted');
+    assert_readonly(track, 'muted');
+    assert_true(typeof track.muted === 'boolean');
+    assert_false(track.muted, 'muted property must be false initially');
+  }, '[MediaStreamTrack (' + type + ')] muted attribute');
+
+  test(function () {
+    assert_own_property(track, 'onmute');
+    assert_true(track.onmute === null);
+  }, '[MediaStreamTrack (' + type + ')] onmute EventHandler');
+
+  test(function () {
+    assert_own_property(track, 'onunmute');
+    assert_true(track.onunmute === null);
+  }, '[MediaStreamTrack (' + type + ')] onunmute EventHandler');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_own_property(track, '_readonly');
+    assert_readonly(track, '_readonly');
+    assert_true(typeof track._readonly === 'boolean');
+  }, '[MediaStreamTrack (' + type + ')] _readonly attribute');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_own_property(track, 'remote');
+    assert_readonly(track, 'remote');
+    assert_true(typeof track.remote === 'boolean');
+  }, '[MediaStreamTrack (' + type + ')] remote attribute');
+
+  test(function () {
+    assert_own_property(track, 'readyState');
+    assert_readonly(track, 'readyState');
+    assert_true(typeof track.readyState === 'string');
+    // TODO(kjellander): verify the initial state.
+  }, '[MediaStreamTrack (' + type + ')] readyState attribute');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_own_property(track, 'onstarted');
+    assert_true(track.onstarted === null);
+  }, '[MediaStreamTrack (' + type + ')] onstarted EventHandler');
+
+  test(function () {
+    assert_own_property(track, 'onended');
+    assert_true(track.onended === null);
+  }, '[MediaStreamTrack (' + type + ')] onended EventHandler');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'getSourceInfos');
+    assert_true(typeof track.getSourceInfos === 'function');
+  }, '[MediaStreamTrack (' + type + ')]: getSourceInfos function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'constraints');
+    assert_true(typeof track.constraints === 'function');
+  }, '[MediaStreamTrack (' + type + ')]: constraints function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'states');
+    assert_true(typeof track.states === 'function');
+  }, '[MediaStreamTrack (' + type + ')]: states function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'capabilities');
+    assert_true(typeof track.capabilities === 'function');
+  }, '[MediaStreamTrack (' + type + ')]: capabilities function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'applyConstraints');
+    assert_true(typeof track.applyConstraints === 'function');
+  }, '[MediaStreamTrack (' + type + ')]: applyConstraints function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_own_property(track, 'onoverconstrained');
+    assert_true(track.onoverconstrained === null);
+  }, '[MediaStreamTrack (' + type + ')] onoverconstrained EventHandler');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'clone');
+    assert_true(typeof track.clone === 'function');
+  }, '[MediaStreamTrack (' + type + ')] clone function');
+
+  test(function () {
+    // Missing in Chrome.
+    assert_inherits(track, 'stop');
+    assert_true(typeof track.stop === 'function');
+  }, '[MediaStreamTrack (' + type + ')] stop function');
+}
+mediaStreamTrackTest.step(function() {
+  var okCallback = mediaStreamTrackTest.step_func(function (stream) {
+    verifyTrack('audio', stream.getAudioTracks()[0]);
+    verifyTrack('video', stream.getVideoTracks()[0]);
+    mediaStreamTrackTest.done();
+  });
+  invokeGetUserMedia(mediaStreamTrackTest, okCallback);
+});
+
+mediaStreamTrackTest.step(function() {
+  var track;
+  var okCallback = mediaStreamTrackTest.step_func(function (stream) {
+    // Verify event handlers are working.
+    track = stream.getVideoTracks()[0];
+    track.onended = onendedCallback;
+    track.stop();
+    mediaStreamTrackTest.done();
+  });
+  var onendedCallback = mediaStreamTrackTest.step_func(function () {
+    assert_true(track.ended);
+    mediaStreamTrackTest.done();
+  });
+  invokeGetUserMedia(mediaStreamTrackTest, okCallback);
+});
+
+// 4.4 MediaStreamTrackEvent tests.
+var mediaStreamTrackEventTest = async_test('4.4 MediaStreamTrackEvent');
+mediaStreamTrackEventTest.step(function() {
+  var okCallback = mediaStreamTrackEventTest.step_func(function (stream) {
+    // TODO(kjellander): verify attributes
+    mediaStreamTrackEventTest.done();
+  });
+  invokeGetUserMedia(mediaStreamTrackEventTest, okCallback);
+});
+
+// 4.5 Video and Audio Tracks tests.
+var avTracksTest = async_test('4.5 Video and Audio Tracks');
+avTracksTest.step(function() {
+  var okCallback = avTracksTest.step_func(function (stream) {
+    // TODO(kjellander): verify attributes
+    avTracksTest.done();
+  });
+  invokeGetUserMedia(avTracksTest, okCallback);
+});
+
+// 5. The model: sources, sinks, constraints, and states
+
+// 6. Source states
+// 6.1 Dictionary MediaSourceStates Members
+
+// 7. Source capabilities
+// 7.1 Dictionary CapabilityRange Members
+// 7.2 CapabilityList array
+// 7.3 Dictionary AllVideoCapabilities Members
+// 7.4 Dictionary AllAudioCapabilities Members
+
+// 8. URL tests.
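+// (The test below exercises the prefixed webkitURL object; the unprefixed
+// name in the spec is URL.createObjectURL(stream).)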
+var createObjectURLTest = async_test('8.1 URL createObjectURL method'); +createObjectURLTest.step(function() { + var okCallback = createObjectURLTest.step_func(function (stream) { + var url = webkitURL.createObjectURL(stream); + assert_true(typeof url === 'string'); + createObjectURLTest.done(); + }); + invokeGetUserMedia(createObjectURLTest, okCallback); +}); + +// 9. MediaStreams as Media Elements. +var mediaElementsTest = async_test('9. MediaStreams as Media Elements'); + +function verifyVideoTagWithStream(videoTag) { + test(function () { + assert_equals(videoTag.buffered.length, 0); + }, '[Video tag] buffered attribute'); + + test(function () { + // Attempts to alter currentTime shall be ignored. + assert_true(videoTag.currentTime >= 0); + assert_throws('InvalidStateError', + function () { videoTag.currentTime = 1234; }, + 'Attempts to modify currentTime shall throw ' + + 'InvalidStateError'); + }, '[Video tag] currentTime attribute'); + + test(function () { + assert_equals(videoTag.duration, Infinity, 'videoTag.duration'); + }, '[Video tag] duration attribute'); + + test(function () { + assert_false(videoTag.seeking, 'videoTag.seeking'); + }, '[Video tag] seeking attribute'); + + test(function () { + assert_equals(videoTag.defaultPlaybackRate, 1.0); + assert_throws('DOMException', + function () { videoTag.defaultPlaybackRate = 2.0; }, + 'Attempts to alter videoTag.defaultPlaybackRate MUST fail'); + }, '[Video tag] defaultPlaybackRate attribute'); + + test(function () { + assert_equals(videoTag.playbackRate, 1.0); + assert_throws('DOMException', + function () { videoTag.playbackRate = 2.0; }, + 'Attempts to alter videoTag.playbackRate MUST fail'); + }, '[Video tag] playbackRate attribute'); + + test(function () { + assert_equals(videoTag.played.length, 1, 'videoTag.played.length'); + assert_equals(videoTag.played.start(0), 0); + assert_true(videoTag.played.end(0) >= videoTag.currentTime); + }, '[Video tag] played attribute'); + + test(function () { + assert_equals(videoTag.seekable.length, 0); + assert_equals(videoTag.seekable.start(), videoTag.currentTime); + assert_equals(videoTag.seekable.end(), videoTag.currentTime); + assert_equals(videoTag.startDate, NaN, 'videoTag.startDate'); + }, '[Video tag] seekable attribute'); + + test(function () { + assert_false(videoTag.loop); + }, '[Video tag] loop attribute'); +}; + +mediaElementsTest.step(function() { + var okCallback = mediaElementsTest.step_func(function (stream) { + var videoTag = document.getElementById('local-view'); + // Call the polyfill wrapper to attach the media stream to this element. + attachMediaStream(videoTag, stream); + verifyVideoTagWithStream(videoTag); + mediaElementsTest.done(); + }); + invokeGetUserMedia(mediaElementsTest, okCallback); +}); + +// 11. Obtaining local multimedia content. + +// 11.1 NavigatorUserMedia. 
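+// (As elsewhere in this file, getUserMedia is assumed to be the polyfill
+// wrapper, e.g. something along the lines of:
+//   var getUserMedia = (navigator.getUserMedia ||
+//                       navigator.webkitGetUserMedia ||
+//                       navigator.mozGetUserMedia).bind(navigator);
+// so the calls below stay vendor-prefix free.)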
+var getUserMediaTest = async_test('11.1 NavigatorUserMedia'); +getUserMediaTest.step(function() { + var okCallback = getUserMediaTest.step_func(function (stream) { + assert_true(stream !== null); + getUserMediaTest.done(); + }); + + // boolean parameters, without failure callback: + getUserMedia({ video: true, audio: true }, okCallback); + getUserMedia({ video: true, audio: false }, okCallback); + getUserMedia({ video: false, audio: true }, okCallback); + + // boolean parameters, with failure callback: + getUserMedia({ video: true, audio: true }, okCallback, + failedCallback(getUserMediaTest)); + getUserMedia({ video: true, audio: false }, okCallback, + failedCallback(getUserMediaTest)); + getUserMedia({ video: false, audio: true }, okCallback, + failedCallback(getUserMediaTest)); +}); + +// 11.2 MediaStreamConstraints. +var constraintsTest = async_test('11.2 MediaStreamConstraints'); +constraintsTest.step(function() { + var okCallback = constraintsTest.step_func(function (stream) { + assert_true(stream !== null); + constraintsTest.done(); + }); + + // Constraints on video. + // See http://webrtc.googlecode.com/svn/trunk/samples/js/demos/html/constraints-and-stats.html + // for more examples of constraints. + var constraints = {}; + constraints.audio = true; + constraints.video = { mandatory: {}, optional: [] }; + constraints.video.mandatory.minWidth = 640; + constraints.video.mandatory.minHeight = 480; + constraints.video.mandatory.minFrameRate = 15; + + getUserMedia(constraints, okCallback, failedCallback(constraintsTest)); +}); + +// 11.3 NavigatorUserMediaSuccessCallback. +var successCallbackTest = + async_test('11.3 NavigatorUserMediaSuccessCallback'); +successCallbackTest.step(function() { + var okCallback = successCallbackTest.step_func(function (stream) { + assert_true(stream !== null); + successCallbackTest.done(); + }); + invokeGetUserMedia(successCallbackTest, okCallback); +}); + +// 11.4 NavigatorUserMediaError and NavigatorUserMediaErrorCallback. +var errorCallbackTest = async_test('11.4 NavigatorUserMediaError and ' + + 'NavigatorUserMediaErrorCallback'); +errorCallbackTest.step(function() { + var okCallback = errorCallbackTest.step_func(function (stream) { + assert_unreached('Should not get a success callback'); + }); + var errorCallback = errorCallbackTest.step_func(function (error) { + assert_own_property(error, 'name'); + assert_readonly(error.name); + assert_true(typeof error.name === 'string'); + assert_equals(error.name, 'ConstraintNotSatisfiedError', 'error.name'); + errorCallbackTest.done(); + }); + // Setting both audio and video to false triggers an error callback. + // TODO(kjellander): Figure out if there's a way in the spec to trigger an + // error callback. + + // TODO(kjellander): Investigate why the error callback is not called when + // false/false is provided in Chrome. + getUserMedia({ video: false, audio: false }, okCallback, errorCallback); +}); diff --git a/media/webrtc/trunk/webrtc/test/webrtc_test_common.gyp b/media/webrtc/trunk/webrtc/test/webrtc_test_common.gyp new file mode 100644 index 000000000000..eac8b97ec622 --- /dev/null +++ b/media/webrtc/trunk/webrtc/test/webrtc_test_common.gyp @@ -0,0 +1,148 @@ +# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. +{ + 'includes': [ + '../build/common.gypi', + ], + 'targets': [ + { + 'target_name': 'webrtc_test_common', + 'type': 'static_library', + 'sources': [ + 'configurable_frame_size_encoder.cc', + 'configurable_frame_size_encoder.h', + 'direct_transport.cc', + 'direct_transport.h', + 'fake_audio_device.cc', + 'fake_audio_device.h', + 'fake_decoder.cc', + 'fake_decoder.h', + 'fake_encoder.cc', + 'fake_encoder.h', + 'fake_network_pipe.cc', + 'fake_network_pipe.h', + 'flags.cc', + 'flags.h', + 'frame_generator_capturer.cc', + 'frame_generator_capturer.h', + 'gl/gl_renderer.cc', + 'gl/gl_renderer.h', + 'linux/glx_renderer.cc', + 'linux/glx_renderer.h', + 'linux/video_renderer_linux.cc', + 'mac/run_tests.mm', + 'mac/video_renderer_mac.h', + 'mac/video_renderer_mac.mm', + 'mock_transport.h', + 'null_platform_renderer.cc', + 'null_transport.cc', + 'null_transport.h', + 'rtp_rtcp_observer.h', + 'run_tests.cc', + 'run_tests.h', + 'run_loop.cc', + 'run_loop.h', + 'statistics.cc', + 'statistics.h', + 'vcm_capturer.cc', + 'vcm_capturer.h', + 'video_capturer.cc', + 'video_capturer.h', + 'video_renderer.cc', + 'video_renderer.h', + 'win/d3d_renderer.cc', + 'win/d3d_renderer.h', + 'win/run_loop_win.cc', + ], + 'conditions': [ + ['OS=="linux"', { + 'sources!': [ + 'null_platform_renderer.cc', + ], + }], + ['OS=="mac"', { + 'sources!': [ + 'null_platform_renderer.cc', + 'run_tests.cc', + ], + }], + ['OS!="linux" and OS!="mac"', { + 'sources!' : [ + 'gl/gl_renderer.cc', + 'gl/gl_renderer.h', + ], + }], + ['OS=="win"', { + 'sources!': [ + 'null_platform_renderer.cc', + 'run_loop.cc', + ], + }], + ], + 'direct_dependent_settings': { + 'conditions': [ + ['OS=="linux"', { + 'libraries': [ + '-lXext', + '-lX11', + '-lGL', + ], + }], + #TODO(pbos) : These dependencies should not have to be here, they + # aren't used by test code directly, only by components + # used by the tests. 
+ ['OS=="android"', { + 'libraries' : [ + '-lGLESv2', '-llog', + ], + }], + ['OS=="mac"', { + 'xcode_settings' : { + 'OTHER_LDFLAGS' : [ + '-framework Foundation', + '-framework AppKit', + '-framework Cocoa', + '-framework OpenGL', + '-framework CoreVideo', + '-framework CoreAudio', + '-framework AudioToolbox', + ], + }, + }], + ], + }, + 'dependencies': [ + '<(DEPTH)/testing/gtest.gyp:gtest', + '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', + '<(webrtc_root)/modules/modules.gyp:video_capture_module', + '<(webrtc_root)/modules/modules.gyp:media_file', + '<(webrtc_root)/test/test.gyp:frame_generator', + '<(webrtc_root)/test/test.gyp:test_support', + ], + }, + ], + 'conditions': [ + ['include_tests==1', { + 'targets': [ + { + 'target_name': 'webrtc_test_common_unittests', + 'type': '<(gtest_target_type)', + 'dependencies': [ + 'webrtc_test_common', + '<(DEPTH)/testing/gtest.gyp:gtest', + '<(DEPTH)/testing/gmock.gyp:gmock', + '<(webrtc_root)/test/test.gyp:test_support_main', + ], + 'sources': [ + 'fake_network_pipe_unittest.cc', + ], + }, + ], #targets + }], # include_tests + ], # conditions +} diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/win/d3d_renderer.cc b/media/webrtc/trunk/webrtc/test/win/d3d_renderer.cc similarity index 99% rename from media/webrtc/trunk/webrtc/video_engine/test/common/win/d3d_renderer.cc rename to media/webrtc/trunk/webrtc/test/win/d3d_renderer.cc index 2f9b29f4a031..7da8f445b02f 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/win/d3d_renderer.cc +++ b/media/webrtc/trunk/webrtc/test/win/d3d_renderer.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "webrtc/video_engine/test/common/win/d3d_renderer.h" +#include "webrtc/test/win/d3d_renderer.h" #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/win/d3d_renderer.h b/media/webrtc/trunk/webrtc/test/win/d3d_renderer.h similarity index 96% rename from media/webrtc/trunk/webrtc/video_engine/test/common/win/d3d_renderer.h rename to media/webrtc/trunk/webrtc/test/win/d3d_renderer.h index 38efa4628a39..e8b06926171f 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/win/d3d_renderer.h +++ b/media/webrtc/trunk/webrtc/test/win/d3d_renderer.h @@ -14,8 +14,8 @@ #include #include "webrtc/system_wrappers/interface/scoped_refptr.h" +#include "webrtc/test/video_renderer.h" #include "webrtc/typedefs.h" -#include "webrtc/video_engine/test/common/video_renderer.h" namespace webrtc { namespace test { diff --git a/media/webrtc/trunk/webrtc/video_engine/test/common/win/run_loop_win.cc b/media/webrtc/trunk/webrtc/test/win/run_loop_win.cc similarity index 93% rename from media/webrtc/trunk/webrtc/video_engine/test/common/win/run_loop_win.cc rename to media/webrtc/trunk/webrtc/test/win/run_loop_win.cc index c62eeb1318a0..ec29cc5a6734 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/common/win/run_loop_win.cc +++ b/media/webrtc/trunk/webrtc/test/win/run_loop_win.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "webrtc/video_engine/test/common/run_loop.h" +#include "webrtc/test/run_loop.h" #include diff --git a/media/webrtc/trunk/webrtc/tools/barcode_tools/barcode_decoder.py b/media/webrtc/trunk/webrtc/tools/barcode_tools/barcode_decoder.py index 0bf8b5b41de5..b7b7ddd4a6e9 100755 --- a/media/webrtc/trunk/webrtc/tools/barcode_tools/barcode_decoder.py +++ b/media/webrtc/trunk/webrtc/tools/barcode_tools/barcode_decoder.py @@ -11,15 +11,18 @@ import optparse import os import sys -import helper_functions +if __name__ == '__main__': + # Make sure we always can import helper_functions. + sys.path.append(os.path.dirname(__file__)) +import helper_functions # Chrome browsertests will throw away stderr; avoid that output gets lost. sys.stderr = sys.stdout def convert_yuv_to_png_files(yuv_file_name, yuv_frame_width, yuv_frame_height, - output_directory, ffmpeg_dir=None): + output_directory, ffmpeg_path): """Converts a YUV video file into PNG frames. The function uses ffmpeg to convert the YUV file. The output of ffmpeg is in @@ -31,18 +34,17 @@ def convert_yuv_to_png_files(yuv_file_name, yuv_frame_width, yuv_frame_height, yuv_frame_height(int): The height of one YUV frame. output_directory(string): The output directory where the PNG frames will be stored. - ffmpeg_dir(string): The directory containing the ffmpeg executable. If - omitted, the PATH will be searched for it. + ffmpeg_path(string): The path to the ffmpeg executable. If None, the PATH + will be searched for it. Return: (bool): True if the conversion was OK. """ size_string = str(yuv_frame_width) + 'x' + str(yuv_frame_height) output_files_pattern = os.path.join(output_directory, 'frame_%04d.png') - ffmpeg_executable = 'ffmpeg.exe' if sys.platform == 'win32' else 'ffmpeg' - if ffmpeg_dir: - ffmpeg_executable = os.path.join(ffmpeg_dir, ffmpeg_executable) - command = [ffmpeg_executable, '-s', '%s' % size_string, '-i', '%s' + if not ffmpeg_path: + ffmpeg_path = 'ffmpeg.exe' if sys.platform == 'win32' else 'ffmpeg' + command = [ffmpeg_path, '-s', '%s' % size_string, '-i', '%s' % yuv_file_name, '-f', 'image2', '-vcodec', 'png', '%s' % output_files_pattern] try: @@ -54,12 +56,12 @@ def convert_yuv_to_png_files(yuv_file_name, yuv_frame_width, yuv_frame_height, print 'Error executing command: %s. Error: %s' % (command, err) return False except OSError: - print ('Did not find %s. Have you installed it?' % ffmpeg_executable) + print ('Did not find %s. Have you installed it?' % ffmpeg_path) return False return True -def decode_frames(input_directory, zxing_dir=None): +def decode_frames(input_directory, zxing_path): """Decodes the barcodes overlaid in each frame. The function uses the Zxing command-line tool from the Zxing C++ distribution @@ -73,19 +75,18 @@ def decode_frames(input_directory, zxing_dir=None): Args: input_directory(string): The input directory from where the PNG frames are read. - zxing_dir(string): The directory containing the zxing executable. If - omitted, the PATH will be searched for it. + zxing_path(string): The path to the zxing binary. If specified as None, + the PATH will be searched for it. Return: - (bool): True if the decoding went without errors. + (bool): True if the decoding succeeded. """ - zxing_executable = 'zxing.exe' if sys.platform == 'win32' else 'zxing' - if zxing_dir: - zxing_executable = os.path.join(zxing_dir, zxing_executable) - print 'Decoding barcodes from PNG files with %s...' 
% zxing_executable + if not zxing_path: + zxing_path = 'zxing.exe' if sys.platform == 'win32' else 'zxing' + print 'Decoding barcodes from PNG files with %s...' % zxing_path return helper_functions.perform_action_on_all_files( directory=input_directory, file_pattern='frame_', file_extension='png', start_number=1, action=_decode_barcode_in_file, - command_line_decoder=zxing_executable) + command_line_decoder=zxing_path) def _decode_barcode_in_file(file_name, command_line_decoder): @@ -102,7 +103,6 @@ def _decode_barcode_in_file(file_name, command_line_decoder): try: out = helper_functions.run_shell_command( command, fail_msg='Error during decoding of %s' % file_name) - print 'Image %s : decoded barcode: %s' % (file_name, out) text_file = open('%s.txt' % file_name[:-4], 'w') text_file.write(out) text_file.close() @@ -230,14 +230,14 @@ def _parse_args(): usage = "usage: %prog [options]" parser = optparse.OptionParser(usage=usage) - parser.add_option('--zxing_dir', type='string', - help=('The path to the directory where the zxing executable' - 'is located. If omitted, it will be assumed to be ' - 'present in the PATH.')) - parser.add_option('--ffmpeg_dir', type='string', default=None, - help=('The path to the directory where the ffmpeg ' - 'executable is located. If omitted, it will be ' - 'assumed to be present in the PATH.')) + parser.add_option('--zxing_path', type='string', + help=('The path to where the zxing executable is located. ' + 'If omitted, it will be assumed to be present in the ' + 'PATH with the name zxing[.exe].')) + parser.add_option('--ffmpeg_path', type='string', + help=('The path to where the ffmpeg executable is located. ' + 'If omitted, it will be assumed to be present in the ' + 'PATH with the name ffmpeg[.exe].')) parser.add_option('--yuv_frame_width', type='int', default=640, help='Width of the YUV file\'s frames. Default: %default') parser.add_option('--yuv_frame_height', type='int', default=480, @@ -271,13 +271,13 @@ def _main(): if not convert_yuv_to_png_files(options.yuv_file, options.yuv_frame_width, options.yuv_frame_height, output_directory=options.png_working_dir, - ffmpeg_dir=options.ffmpeg_dir): + ffmpeg_path=options.ffmpeg_path): print 'An error occurred converting from YUV to PNG frames.' return -1 # Decode the barcodes from the PNG frames. if not decode_frames(input_directory=options.png_working_dir, - zxing_dir=options.zxing_dir): + zxing_path=options.zxing_path): print 'An error occurred decoding barcodes from PNG frames.' return -2 diff --git a/media/webrtc/trunk/webrtc/tools/compare_videos.py b/media/webrtc/trunk/webrtc/tools/compare_videos.py index afdd6a467b85..f6275a67d3b7 100755 --- a/media/webrtc/trunk/webrtc/tools/compare_videos.py +++ b/media/webrtc/trunk/webrtc/tools/compare_videos.py @@ -9,8 +9,10 @@ import optparse import os +import shutil import subprocess import sys +import tempfile SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -24,7 +26,7 @@ def _ParseArgs(): usage = 'usage: %prog [options]' parser = optparse.OptionParser(usage=usage) - parser.add_option('--label', type='string', default="MY_TEST", + parser.add_option('--label', type='string', default='MY_TEST', help=('Label of the test, used to identify different ' 'tests. 
Default: %default')) parser.add_option('--ref_video', type='string', @@ -34,6 +36,18 @@ def _ParseArgs(): 'video (YUV).')) parser.add_option('--frame_analyzer', type='string', help='Path to the frame analyzer executable.') + parser.add_option('--barcode_decoder', type='string', + help=('Path to the barcode decoder script. By default, we ' + 'will assume we can find it in barcode_tools/' + 'relative to this directory.')) + parser.add_option('--ffmpeg_path', type='string', + help=('The path to where the ffmpeg executable is located. ' + 'If omitted, it will be assumed to be present in the ' + 'PATH with the name ffmpeg[.exe].')) + parser.add_option('--zxing_path', type='string', + help=('The path to where the zxing executable is located. ' + 'If omitted, it will be assumed to be present in the ' + 'PATH with the name zxing[.exe].')) parser.add_option('--stats_file', type='string', default='stats.txt', help=('Path to the temporary stats file to be created and ' 'used. Default: %default')) @@ -77,9 +91,18 @@ def main(): """ options = _ParseArgs() + if options.barcode_decoder: + path_to_decoder = options.barcode_decoder + else: + path_to_decoder = os.path.join(SCRIPT_DIR, 'barcode_tools', + 'barcode_decoder.py') + + # On Windows, sometimes the inherited stdin handle from the parent process + # fails. Work around this by passing null to stdin to the subprocesses. + null_filehandle = open(os.devnull, 'r') + # Run barcode decoder on the test video to identify frame numbers. - path_to_decoder = os.path.join(SCRIPT_DIR, 'barcode_tools', - 'barcode_decoder.py') + png_working_directory = tempfile.mkdtemp() cmd = [ sys.executable, path_to_decoder, @@ -87,9 +110,17 @@ def main(): '--yuv_frame_width=%d' % options.yuv_frame_width, '--yuv_frame_height=%d' % options.yuv_frame_height, '--stats_file=%s' % options.stats_file, + '--png_working_dir=%s' % png_working_directory, ] - barcode_decoder = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr) + if options.zxing_path: + cmd.append('--zxing_path=%s' % options.zxing_path) + if options.ffmpeg_path: + cmd.append('--ffmpeg_path=%s' % options.ffmpeg_path) + barcode_decoder = subprocess.Popen(cmd, stdin=null_filehandle, + stdout=sys.stdout, stderr=sys.stderr) barcode_decoder.wait() + + shutil.rmtree(png_working_directory) if barcode_decoder.returncode != 0: print 'Failed to run barcode decoder script.' return 1 @@ -104,7 +135,8 @@ def main(): '--width=%d' % options.yuv_frame_width, '--height=%d' % options.yuv_frame_height, ] - frame_analyzer = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr) + frame_analyzer = subprocess.Popen(cmd, stdin=null_filehandle, + stdout=sys.stdout, stderr=sys.stderr) frame_analyzer.wait() if frame_analyzer.returncode != 0: print 'Failed to run frame analyzer.' diff --git a/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/README b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/README new file mode 100644 index 000000000000..aa853855853d --- /dev/null +++ b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/README @@ -0,0 +1,27 @@ +The tools here run an end-to-end audio quality test on Linux using PulseAudio. + +INSTALLATION +The test depends on PulseAudio virtual devices (null sinks). Without additional +arguments, run_audio_test.py expects a pair of sinks named "capture" and +"render". To create these devices at machine startup, place the provided +default.pa file in ~/.pulse. Alternately, the "pacmd" commands therein can be +run on the command-line to create the devices. 
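+For example, assuming the sink names used by the provided default.pa, the
+equivalent command-line invocations are:
+
+  pacmd load-module module-null-sink sink_name=render sink_properties=device.description=render format=s16 rate=48000 channels=1
+  pacmd load-module module-null-sink sink_name=capture sink_properties=device.description=capture format=s16 rate=48000 channels=1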
+ +Similarly, place the provided daemon.conf file in ~/.pulse to use high quality +resampling in PulseAudio. This will reduce the resampling impact on the outcome +of the test. + +Build all WebRTC targets as usual (or just the audio_e2e_harness target) to +generate the VoiceEngine harness. + +USAGE +Run run_audio_test.py to start. The script has reasonable defaults and will +use the expected location of audio_e2e_harness. Some settings will usually +be provided by the user, particularly the comparison tool command-line and +regular expression to extract the quality metric. + +An example command-line, run from trunk/ + +webrtc/tools/e2e_quality/audio/run_audio_test.py \ +--input=data/voice_engine/audio_short16.pcm --output=e2e_audio_out.pcm \ +--codec=L16 --compare="comparison-tool" --regexp="(\d\.\d{3})" diff --git a/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc new file mode 100644 index 000000000000..4d443773fd5f --- /dev/null +++ b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Sets up a simple VoiceEngine loopback call with the default audio devices +// and runs forever. Some parameters can be configured through command-line +// flags. + +#include "gflags/gflags.h" +#include "gtest/gtest.h" + +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/channel_transport/include/channel_transport.h" +#include "webrtc/voice_engine/include/voe_audio_processing.h" +#include "webrtc/voice_engine/include/voe_base.h" +#include "webrtc/voice_engine/include/voe_codec.h" +#include "webrtc/voice_engine/include/voe_hardware.h" +#include "webrtc/voice_engine/include/voe_network.h" + +DEFINE_string(render, "render", "render device name"); +DEFINE_string(codec, "ISAC", "codec name"); +DEFINE_int32(rate, 16000, "codec sample rate in Hz"); + +namespace webrtc { +namespace test { + +void RunHarness() { + VoiceEngine* voe = VoiceEngine::Create(); + ASSERT_TRUE(voe != NULL); + VoEAudioProcessing* audio = VoEAudioProcessing::GetInterface(voe); + ASSERT_TRUE(audio != NULL); + VoEBase* base = VoEBase::GetInterface(voe); + ASSERT_TRUE(base != NULL); + VoECodec* codec = VoECodec::GetInterface(voe); + ASSERT_TRUE(codec != NULL); + VoEHardware* hardware = VoEHardware::GetInterface(voe); + ASSERT_TRUE(hardware != NULL); + VoENetwork* network = VoENetwork::GetInterface(voe); + ASSERT_TRUE(network != NULL); + + ASSERT_EQ(0, base->Init()); + int channel = base->CreateChannel(); + ASSERT_NE(-1, channel); + + scoped_ptr voice_channel_transport( + new VoiceChannelTransport(network, channel)); + + ASSERT_EQ(0, voice_channel_transport->SetSendDestination("127.0.0.1", 1234)); + ASSERT_EQ(0, voice_channel_transport->SetLocalReceiver(1234)); + + CodecInst codec_params = {0}; + bool codec_found = false; + for (int i = 0; i < codec->NumOfCodecs(); i++) { + ASSERT_EQ(0, codec->GetCodec(i, codec_params)); + if (FLAGS_codec.compare(codec_params.plname) == 0 && + FLAGS_rate == codec_params.plfreq) { + codec_found = true; + break; + } + } + 
ASSERT_TRUE(codec_found); + ASSERT_EQ(0, codec->SetSendCodec(channel, codec_params)); + + int num_devices = 0; + ASSERT_EQ(0, hardware->GetNumOfPlayoutDevices(num_devices)); + char device_name[128] = {0}; + char guid[128] = {0}; + bool device_found = false; + int device_index; + for (device_index = 0; device_index < num_devices; device_index++) { + ASSERT_EQ(0, hardware->GetPlayoutDeviceName(device_index, device_name, + guid)); + if (FLAGS_render.compare(device_name) == 0) { + device_found = true; + break; + } + } + ASSERT_TRUE(device_found); + ASSERT_EQ(0, hardware->SetPlayoutDevice(device_index)); + + // Disable all audio processing. + ASSERT_EQ(0, audio->SetAgcStatus(false)); + ASSERT_EQ(0, audio->SetEcStatus(false)); + ASSERT_EQ(0, audio->EnableHighPassFilter(false)); + ASSERT_EQ(0, audio->SetNsStatus(false)); + + ASSERT_EQ(0, base->StartReceive(channel)); + ASSERT_EQ(0, base->StartPlayout(channel)); + ASSERT_EQ(0, base->StartSend(channel)); + + // Run forever... + while (1) { + } +} + +} // namespace test +} // namespace webrtc + +int main(int argc, char** argv) { + google::ParseCommandLineFlags(&argc, &argv, true); + webrtc::test::RunHarness(); +} diff --git a/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/daemon.conf b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/daemon.conf new file mode 100644 index 000000000000..26c4df4c8002 --- /dev/null +++ b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/daemon.conf @@ -0,0 +1 @@ +resample-method = speex-float-9 diff --git a/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/default.pa b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/default.pa new file mode 100755 index 000000000000..adef2dbe50c7 --- /dev/null +++ b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/default.pa @@ -0,0 +1,6 @@ +# Place in ~/.pulse/ to add null sinks for the audio end-to-end quality test. + +.include /etc/pulse/default.pa + +load-module module-null-sink sink_name=render sink_properties=device.description=render format=s16 rate=48000 channels=1 +load-module module-null-sink sink_name=capture sink_properties=device.description=capture format=s16 rate=48000 channels=1 diff --git a/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/perf b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/perf new file mode 120000 index 000000000000..fa37c96882b9 --- /dev/null +++ b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/perf @@ -0,0 +1 @@ +../../../../tools/perf \ No newline at end of file diff --git a/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/run_audio_test.py b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/run_audio_test.py new file mode 100755 index 000000000000..51caa42cd9e4 --- /dev/null +++ b/media/webrtc/trunk/webrtc/tools/e2e_quality/audio/run_audio_test.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Runs an end-to-end audio quality test on Linux. + +Expects the presence of PulseAudio virtual devices (null sinks). These are +configured as default devices for a VoiceEngine audio call. A PulseAudio +utility (pacat) is used to play to and record from the virtual devices. + +The input reference file is then compared to the output file. 
+""" + +import optparse +import os +import re +import shlex +import subprocess +import sys +import time + +import perf.perf_utils + +def main(argv): + parser = optparse.OptionParser() + usage = 'Usage: %prog [options]' + parser.set_usage(usage) + parser.add_option('--input', default='input.pcm', help='input PCM file') + parser.add_option('--output', default='output.pcm', help='output PCM file') + parser.add_option('--codec', default='ISAC', help='codec name') + parser.add_option('--rate', default='16000', help='sample rate in Hz') + parser.add_option('--channels', default='1', help='number of channels') + parser.add_option('--play_sink', default='capture', + help='name of PulseAudio sink to which to play audio') + parser.add_option('--rec_sink', default='render', + help='name of PulseAudio sink whose monitor will be recorded') + parser.add_option('--harness', + default=os.path.abspath(os.path.dirname(sys.argv[0]) + + '/../../../out/Debug/audio_e2e_harness'), + help='path to audio harness executable') + parser.add_option('--compare', + help='command-line arguments for comparison tool') + parser.add_option('--regexp', + help='regular expression to extract the comparison metric') + options, _ = parser.parse_args(argv[1:]) + + # Get the initial default capture device, to restore later. + command = ['pacmd', 'list-sources'] + print ' '.join(command) + proc = subprocess.Popen(command, stdout=subprocess.PIPE) + output = proc.communicate()[0] + if proc.returncode != 0: + return proc.returncode + default_source = re.search(r'(^ \* index: )([0-9]+$)', output, + re.MULTILINE).group(2) + + # Set the default capture device to be used by VoiceEngine. We unfortunately + # need to do this rather than select the devices directly through the harness + # because monitor sources don't appear in VoiceEngine except as defaults. + # + # We pass the render device for VoiceEngine to select because (for unknown + # reasons) the virtual device is sometimes not used when the default. + command = ['pacmd', 'set-default-source', options.play_sink + '.monitor'] + print ' '.join(command) + retcode = subprocess.call(command, stdout=subprocess.PIPE) + if retcode != 0: + return retcode + + command = [options.harness, '--render=' + options.rec_sink, + '--codec=' + options.codec, '--rate=' + options.rate] + print ' '.join(command) + voe_proc = subprocess.Popen(command) + + # If recording starts before there is data available, pacat sometimes + # inexplicably adds a large delay to the start of the file. We wait here in + # an attempt to prevent that, because VoE often takes some time to startup a + # call. + time.sleep(5) + + format_args = ['--format=s16le', '--rate=' + options.rate, + '--channels=' + options.channels, '--raw'] + command = (['pacat', '-p', '-d', options.play_sink] + format_args + + [options.input]) + print ' '.join(command) + play_proc = subprocess.Popen(command) + + command = (['pacat', '-r', '-d', options.rec_sink + '.monitor'] + + format_args + [options.output]) + print ' '.join(command) + record_proc = subprocess.Popen(command) + + retcode = play_proc.wait() + # If these ended early, an exception will be thrown here. + record_proc.kill() + voe_proc.kill() + if retcode != 0: + return retcode + + # Restore the initial default capture device. 
+  command = ['pacmd', 'set-default-source', default_source]
+  print ' '.join(command)
+  retcode = subprocess.call(command, stdout=subprocess.PIPE)
+  if retcode != 0:
+    return retcode
+
+  if options.compare and options.regexp:
+    command = shlex.split(options.compare) + [options.input, options.output]
+    print ' '.join(command)
+    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
+    output = proc.communicate()[0]
+    if proc.returncode != 0:
+      return proc.returncode
+
+    # The list should only contain one item.
+    value = ''.join(re.findall(options.regexp, output))
+
+    perf.perf_utils.PrintPerfResult(graph_name='audio_e2e_score',
+                                    series_name='e2e_score',
+                                    data_point=value,
+                                    units='MOS')  # Assuming we run PESQ.
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.cc b/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.cc
index 6e738c245c00..b8c90b10718e 100644
--- a/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.cc
+++ b/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.cc
@@ -232,6 +232,11 @@ void RunAnalysis(const char* reference_file_name, const char* test_file_name,
 
 void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
                                       const std::string& stats_file_name) {
+  PrintMaxRepeatedAndSkippedFrames(stdout, label, stats_file_name);
+}
+
+void PrintMaxRepeatedAndSkippedFrames(FILE* output, const std::string& label,
+                                      const std::string& stats_file_name) {
   FILE* stats_file = fopen(stats_file_name.c_str(), "r");
   if (stats_file == NULL) {
     fprintf(stderr, "Couldn't open stats file for reading: %s\n",
@@ -271,33 +276,38 @@ void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
     }
     previous_frame_number = decoded_frame_number;
   }
-  fprintf(stdout, "RESULT Max_repeated: %s= %d\n", label.c_str(),
+  fprintf(output, "RESULT Max_repeated: %s= %d\n", label.c_str(),
           max_repeated_frames);
-  fprintf(stdout, "RESULT Max_skipped: %s= %d\n", label.c_str(),
+  fprintf(output, "RESULT Max_skipped: %s= %d\n", label.c_str(),
           max_skipped_frames);
   fclose(stats_file);
 }
 
 void PrintAnalysisResults(const std::string& label, ResultsContainer* results) {
+  PrintAnalysisResults(stdout, label, results);
+}
+
+void PrintAnalysisResults(FILE* output, const std::string& label,
+                          ResultsContainer* results) {
   std::vector<AnalysisResult>::iterator iter;
-  fprintf(stdout, "RESULT Unique_frames_count: %s= %u\n", label.c_str(),
+  fprintf(output, "RESULT Unique_frames_count: %s= %u\n", label.c_str(),
           static_cast<unsigned int>(results->frames.size()));
   if (results->frames.size() > 0u) {
-    fprintf(stdout, "RESULT PSNR: %s= [", label.c_str());
+    fprintf(output, "RESULT PSNR: %s= [", label.c_str());
     for (iter = results->frames.begin(); iter != results->frames.end() - 1;
         ++iter) {
-      fprintf(stdout, "%f,", iter->psnr_value);
+      fprintf(output, "%f,", iter->psnr_value);
     }
-    fprintf(stdout, "%f] dB\n", iter->psnr_value);
+    fprintf(output, "%f] dB\n", iter->psnr_value);
 
-    fprintf(stdout, "RESULT SSIM: %s= [", label.c_str());
+    fprintf(output, "RESULT SSIM: %s= [", label.c_str());
     for (iter = results->frames.begin(); iter != results->frames.end() - 1;
         ++iter) {
-      fprintf(stdout, "%f,", iter->ssim_value);
+      fprintf(output, "%f,", iter->ssim_value);
     }
-    fprintf(stdout, "%f]\n", iter->ssim_value);
+    fprintf(output, "%f]\n", iter->ssim_value);
   }
 }
diff --git a/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.h b/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.h
index e78fa616b9af..b2ecc082c75b 100644 --- a/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.h +++ b/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis.h @@ -67,11 +67,19 @@ double CalculateMetrics(VideoAnalysisMetricsType video_metrics_type, // no output will be written. void PrintAnalysisResults(const std::string& label, ResultsContainer* results); +// Similar to the above, but will print to the specified file handle. +void PrintAnalysisResults(FILE* output, const std::string& label, + ResultsContainer* results); + // Calculates max repeated and skipped frames and prints them to stdout in a // format that is compatible with Chromium performance numbers. void PrintMaxRepeatedAndSkippedFrames(const std::string& label, const std::string& stats_file_name); +// Similar to the above, but will print to the specified file handle. +void PrintMaxRepeatedAndSkippedFrames(FILE* output, const std::string& label, + const std::string& stats_file_name); + // Gets the next line from an open stats file. bool GetNextStatsLine(FILE* stats_file, char* line); diff --git a/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis_unittest.cc b/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis_unittest.cc index a24d4ae3d14e..85fd11892cde 100644 --- a/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis_unittest.cc +++ b/media/webrtc/trunk/webrtc/tools/frame_analyzer/video_quality_analysis_unittest.cc @@ -21,41 +21,59 @@ namespace webrtc { namespace test { +// Setup a log file to write the output to instead of stdout because we don't +// want those numbers to be picked up as perf numbers. +class VideoQualityAnalysisTest : public ::testing::Test { + protected: + static void SetUpTestCase() { + std::string log_filename = webrtc::test::OutputPath() + + "VideoQualityAnalysisTest.log"; + logfile_ = fopen(log_filename.c_str(), "w"); + ASSERT_TRUE(logfile_ != NULL); + } + static void TearDownTestCase() { + ASSERT_EQ(0, fclose(logfile_)); + } + static FILE* logfile_; +}; +FILE* VideoQualityAnalysisTest::logfile_ = NULL; -TEST(VideoQualityAnalysisTest, PrintAnalysisResultsEmpty) { +TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsEmpty) { ResultsContainer result; - PrintAnalysisResults("Empty", &result); + PrintAnalysisResults(logfile_, "Empty", &result); } -TEST(VideoQualityAnalysisTest, PrintAnalysisResultsOneFrame) { +TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsOneFrame) { ResultsContainer result; result.frames.push_back(AnalysisResult(0, 35.0, 0.9)); - PrintAnalysisResults("OneFrame", &result); + PrintAnalysisResults(logfile_, "OneFrame", &result); } -TEST(VideoQualityAnalysisTest, PrintAnalysisResultsThreeFrames) { +TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsThreeFrames) { ResultsContainer result; result.frames.push_back(AnalysisResult(0, 35.0, 0.9)); result.frames.push_back(AnalysisResult(1, 34.0, 0.8)); result.frames.push_back(AnalysisResult(2, 33.0, 0.7)); - PrintAnalysisResults("ThreeFrames", &result); + PrintAnalysisResults(logfile_, "ThreeFrames", &result); } -TEST(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesInvalidFile) { +TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesInvalidFile) { std::string stats_filename = OutputPath() + "non-existing-stats-file.txt"; remove(stats_filename.c_str()); - PrintMaxRepeatedAndSkippedFrames("NonExistingStatsFile", stats_filename); + PrintMaxRepeatedAndSkippedFrames(logfile_, "NonExistingStatsFile", + stats_filename); } 
-TEST(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesEmptyStatsFile) { +TEST_F(VideoQualityAnalysisTest, + PrintMaxRepeatedAndSkippedFramesEmptyStatsFile) { std::string stats_filename = OutputPath() + "empty-stats.txt"; std::ofstream stats_file; stats_file.open(stats_filename.c_str()); stats_file.close(); - PrintMaxRepeatedAndSkippedFrames("EmptyStatsFile", stats_filename); + PrintMaxRepeatedAndSkippedFrames(logfile_, "EmptyStatsFile", stats_filename); } -TEST(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) { +TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) { std::string stats_filename = OutputPath() + "stats.txt"; std::ofstream stats_file; stats_file.open(stats_filename.c_str()); @@ -65,7 +83,7 @@ TEST(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) { stats_file << "frame_0004 0106\n"; stats_file.close(); - PrintMaxRepeatedAndSkippedFrames("NormalStatsFile", stats_filename); + PrintMaxRepeatedAndSkippedFrames(logfile_, "NormalStatsFile", stats_filename); } diff --git a/media/webrtc/trunk/webrtc/tools/tools.gyp b/media/webrtc/trunk/webrtc/tools/tools.gyp index 386f0257e369..b8dc4c142d49 100644 --- a/media/webrtc/trunk/webrtc/tools/tools.gyp +++ b/media/webrtc/trunk/webrtc/tools/tools.gyp @@ -17,14 +17,6 @@ 'dependencies': [ '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv', ], - 'include_dirs': [ - 'frame_analyzer', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - 'frame_analyzer', - ], - }, 'export_dependent_settings': [ '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv', ], @@ -108,6 +100,19 @@ 'conditions': [ ['include_tests==1', { 'targets' : [ + { + 'target_name': 'audio_e2e_harness', + 'type': 'executable', + 'dependencies': [ + '<(webrtc_root)/test/test.gyp:channel_transport', + '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine', + '<(DEPTH)/testing/gtest.gyp:gtest', + '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', + ], + 'sources': [ + 'e2e_quality/audio/audio_e2e_harness.cc', + ], + }, # audio_e2e_harness { 'target_name': 'tools_unittests', 'type': '<(gtest_target_type)', @@ -158,10 +163,10 @@ 'target_name': 'tools_unittests_run', 'type': 'none', 'dependencies': [ - '<(import_isolate_path):import_isolate_gypi', 'tools_unittests', ], 'includes': [ + '../build/isolate.gypi', 'tools_unittests.isolate', ], 'sources': [ diff --git a/media/webrtc/trunk/webrtc/tools/tools_unittests.isolate b/media/webrtc/trunk/webrtc/tools/tools_unittests.isolate index 14bd8995597e..540378059b94 100644 --- a/media/webrtc/trunk/webrtc/tools/tools_unittests.isolate +++ b/media/webrtc/trunk/webrtc/tools/tools_unittests.isolate @@ -21,18 +21,17 @@ 'variables': { 'command': [ '../../testing/test_env.py', - '../../tools/swarm_client/googletest/run_test_cases.py', '<(PRODUCT_DIR)/tools_unittests<(EXECUTABLE_SUFFIX)', ], 'isolate_dependency_tracked': [ '../../DEPS', '../../resources/foreman_cif.yuv', '../../testing/test_env.py', - '../../tools/swarm_client/run_isolated.py', - '../../tools/swarm_client/googletest/run_test_cases.py', - '../../tools/swarm_client/third_party/upload.py', '<(PRODUCT_DIR)/tools_unittests<(EXECUTABLE_SUFFIX)', ], + 'isolate_dependency_untracked': [ + '../../tools/swarming_client/', + ], }, }], ], diff --git a/media/webrtc/trunk/webrtc/tools/update_resources.py b/media/webrtc/trunk/webrtc/tools/update_resources.py deleted file mode 100755 index f68d7c6c4794..000000000000 --- a/media/webrtc/trunk/webrtc/tools/update_resources.py +++ /dev/null @@ -1,155 +0,0 @@ -#!/usr/bin/env python -# 
Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -"""Downloads WebRTC resources files from a remote host.""" - -from optparse import OptionParser -from urlparse import urljoin -import os -import shutil -import sys -import tarfile -import tempfile -import urllib2 - -DESIRED_VERSION = 16 -REMOTE_URL_BASE = 'http://commondatastorage.googleapis.com/webrtc-resources' -VERSION_FILENAME = 'webrtc-resources-version' -FILENAME_PREFIX = 'webrtc-resources-' -EXTENSION = '.tgz' -RELATIVE_OUTPUT_PATH = '../../' - - -def main(): - """ - Downloads WebRTC resources files from a remote host. - - This script will download WebRTC resource files used for testing, like audio - and video files. It will check the current version in the DEPS file and - compare it with the one downloaded (kept in a text file in the download dir). - If the DEPS version is different than the one downloaded, the correct version - will be downloaded. - """ - - # Make it possible to skip download using an environment variable: - if os.getenv('WEBRTC_SKIP_RESOURCES_DOWNLOAD'): - print 'Skipping resources download since WEBRTC_SKIP_RESOURCES_DOWNLOAD set' - return - - # Define and parse arguments. - parser = OptionParser() - parser.add_option('-f', '--force', action='store_true', dest='force', - help='forces download and removal of existing resources.') - parser.add_option('-b', '--base_url', dest='base_url', - help= 'Overrides the default Base URL (%s) and uses the ' - 'supplied URL instead.' % REMOTE_URL_BASE) - parser.add_option('-p', dest='path', help= 'path of resources directory' - 'relative to this script', default=RELATIVE_OUTPUT_PATH) - options = parser.parse_args()[0] - - project_root_dir = os.path.normpath(sys.path[0] + '/' + options.path) - downloads_dir = os.path.join(project_root_dir, 'resources') - current_version_file = os.path.join(downloads_dir, VERSION_FILENAME) - - # Ensure the downloads dir is created. - if not os.path.isdir(downloads_dir): - os.mkdir(downloads_dir) - - # Download archive if forced or DEPS version is different than our current. - current_version = _get_current_version(current_version_file) - if DESIRED_VERSION != current_version or options.force: - base_url = options.base_url or REMOTE_URL_BASE - _perform_download(base_url, DESIRED_VERSION, downloads_dir) - else: - print 'Already have correct version: %s' % current_version - - -def _get_current_version(current_version_file): - """Returns the version already downloaded (if any). - - Args: - current_version_file: The filename of the text file containing the - currently downloaded version (if any) on local disk. - Returns: - The version number, or 0 if no downloaded version exists. - """ - current_version = 0 - if os.path.isfile(current_version_file): - f = open(current_version_file) - current_version = int(f.read()) - f.close() - print 'Found downloaded resources: version: %s' % current_version - return current_version - - -def _perform_download(base_url, desired_version, downloads_dir): - """Performs the download and extracts the downloaded resources. - - Args: - base_url: URL that holds the resource downloads. - desired_version: Desired version, which decides the filename. 
- """ - temp_dir = tempfile.mkdtemp(prefix='webrtc-resources-') - try: - archive_name = '%s%s%s' % (FILENAME_PREFIX, desired_version, EXTENSION) - # urljoin requires base URL to end with slash to construct a proper URL - # to our file: - if not base_url[-1:] == '/': - base_url += '/' - remote_archive_url = urljoin(base_url, archive_name) - # Download into the temporary directory with display of progress, inspired - # by the Stack Overflow post at http://goo.gl/JIrbo - temp_filename = os.path.join(temp_dir, archive_name) - print 'Downloading: %s' % remote_archive_url - - response = urllib2.urlopen(remote_archive_url) - temp_file = open(temp_filename, 'wb') - meta = response.info() - file_size_kb = int(meta.getheaders('Content-Length')[0]) / 1024 - print 'Progress: %s : %s kB' % (archive_name, file_size_kb) - - file_size_dl_kb = 0 - block_size = 65536 - while True: - file_buffer = response.read(block_size) - if not file_buffer: - break - file_size_dl_kb += len(file_buffer) / 1024 - temp_file.write(file_buffer) - status = r'%10d kB [%3.2f%%]' % (file_size_dl_kb, - file_size_dl_kb * 100. / file_size_kb) - status += chr(8) * (len(status) + 1) - print status, - print - temp_file.close() - - # Clean up the existing resources dir. - print 'Removing old resources in %s' % downloads_dir - shutil.rmtree(downloads_dir) - os.mkdir(downloads_dir) - - # Extract the archive. - archive = tarfile.open(temp_filename, 'r:gz') - archive.extractall(downloads_dir) - archive.close() - print 'Extracted resource files into %s' % downloads_dir - - # Write the downloaded version to a text file in the resources dir to avoid - # re-download of the same version in the future. - new_version_filename = os.path.join(downloads_dir, VERSION_FILENAME) - version_file = open(new_version_filename, 'w') - version_file.write('%d' % desired_version) - version_file.close() - - finally: - # Clean up the temp dir. - shutil.rmtree(temp_dir) - -if __name__ == '__main__': - main() diff --git a/media/webrtc/trunk/webrtc/video_engine/new_include/transport.h b/media/webrtc/trunk/webrtc/transport.h similarity index 86% rename from media/webrtc/trunk/webrtc/video_engine/new_include/transport.h rename to media/webrtc/trunk/webrtc/transport.h index f83e1e7ec0c2..c44c5b2cc599 100644 --- a/media/webrtc/trunk/webrtc/video_engine/new_include/transport.h +++ b/media/webrtc/trunk/webrtc/transport.h @@ -20,8 +20,8 @@ namespace newapi { class Transport { public: - virtual bool SendRTP(const uint8_t* packet, size_t length) = 0; - virtual bool SendRTCP(const uint8_t* packet, size_t length) = 0; + virtual bool SendRtp(const uint8_t* packet, size_t length) = 0; + virtual bool SendRtcp(const uint8_t* packet, size_t length) = 0; protected: virtual ~Transport() {} diff --git a/media/webrtc/trunk/webrtc/typedefs.h b/media/webrtc/trunk/webrtc/typedefs.h index 74ecd2691680..36d227d344eb 100644 --- a/media/webrtc/trunk/webrtc/typedefs.h +++ b/media/webrtc/trunk/webrtc/typedefs.h @@ -25,30 +25,26 @@ // http://msdn.microsoft.com/en-us/library/b0084kay.aspx // http://www.agner.org/optimize/calling_conventions.pdf // or with gcc, run: "echo | gcc -E -dM -" -// TODO(andrew): replace WEBRTC_LITTLE_ENDIAN with WEBRTC_ARCH_LITTLE_ENDIAN. 
#if defined(_M_X64) || defined(__x86_64__) #define WEBRTC_ARCH_X86_FAMILY #define WEBRTC_ARCH_X86_64 #define WEBRTC_ARCH_64_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN -#define WEBRTC_LITTLE_ENDIAN #elif defined(_M_IX86) || defined(__i386__) #define WEBRTC_ARCH_X86_FAMILY #define WEBRTC_ARCH_X86 #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN -#define WEBRTC_LITTLE_ENDIAN #elif defined(__ARMEL__) -// TODO(andrew): We'd prefer to control platform defines here, but this is +// TODO(ajm): We'd prefer to control platform defines here, but this is // currently provided by the Android makefiles. Commented to avoid duplicate // definition warnings. //#define WEBRTC_ARCH_ARM -// TODO(andrew): Chromium uses the following two defines. Should we switch? +// TODO(ajm): Chromium uses the following two defines. Should we switch? //#define WEBRTC_ARCH_ARM_FAMILY //#define WEBRTC_ARCH_ARMEL #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN -#define WEBRTC_LITTLE_ENDIAN #elif defined(__powerpc64__) #define WEBRTC_ARCH_PPC64 1 #define WEBRTC_ARCH_64_BITS 1 @@ -137,6 +133,10 @@ #error Please add support for your architecture in typedefs.h #endif +#if !(defined(WEBRTC_ARCH_LITTLE_ENDIAN) ^ defined(WEBRTC_ARCH_BIG_ENDIAN)) +#error Define either WEBRTC_ARCH_LITTLE_ENDIAN or WEBRTC_ARCH_BIG_ENDIAN +#endif + #if defined(__SSE2__) || defined(_MSC_VER) #define WEBRTC_USE_SSE2 #endif @@ -169,8 +169,25 @@ typedef unsigned __int64 uint64_t; // (because it won't see this pragma). #pragma clang diagnostic ignored "-Wc++11-extensions" #define OVERRIDE override +#elif defined(__GNUC__) && __cplusplus >= 201103 && \ + (__GNUC__ * 10000 + __GNUC_MINOR__ * 100) >= 40700 +// GCC 4.7 supports explicit virtual overrides when C++11 support is enabled. +#define OVERRIDE override #else #define OVERRIDE #endif +// Annotate a function indicating the caller must examine the return value. +// Use like: +// int foo() WARN_UNUSED_RESULT; +// TODO(ajm): Hack to avoid multiple definitions until the base/ of webrtc and +// libjingle are merged. +#if !defined(WARN_UNUSED_RESULT) +#if defined(__GNUC__) +#define WARN_UNUSED_RESULT __attribute__((warn_unused_result)) +#else +#define WARN_UNUSED_RESULT +#endif +#endif // WARN_UNUSED_RESULT + #endif // WEBRTC_TYPEDEFS_H_ diff --git a/media/webrtc/trunk/webrtc/video/OWNERS b/media/webrtc/trunk/webrtc/video/OWNERS new file mode 100644 index 000000000000..506407499f38 --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/OWNERS @@ -0,0 +1,4 @@ +mflodman@webrtc.org +stefan@webrtc.org +wu@webrtc.org +mallinath@webrtc.org diff --git a/media/webrtc/trunk/webrtc/video/bitrate_estimator_tests.cc b/media/webrtc/trunk/webrtc/video/bitrate_estimator_tests.cc new file mode 100644 index 000000000000..58b196dbdf82 --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/bitrate_estimator_tests.cc @@ -0,0 +1,301 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+#include <algorithm>
+#include <list>
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/fake_decoder.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/test/frame_generator_capturer.h"
+
+namespace webrtc {
+
+static const int kTOFExtensionId = 4;
+static const int kASTExtensionId = 5;
+
+static unsigned int kDefaultTimeoutMs = 30 * 1000;
+static const uint32_t kSendSsrc = 0x654321;
+static const uint32_t kReceiverLocalSsrc = 0x123456;
+static const uint8_t kSendPayloadType = 125;
+
+class BitrateEstimatorTest : public ::testing::Test {
+ public:
+  BitrateEstimatorTest()
+      : receiver_trace_(),
+        send_transport_(),
+        receive_transport_(),
+        sender_call_(),
+        receiver_call_(),
+        send_config_(),
+        receive_config_(),
+        streams_() {
+  }
+
+  virtual ~BitrateEstimatorTest() {
+    EXPECT_TRUE(streams_.empty());
+  }
+
+  virtual void SetUp() {
+    // Create receiver call first so that we are guaranteed to have a trace
+    // callback when sender call is created.
+    Call::Config receiver_call_config(&receive_transport_);
+    receiver_call_config.trace_callback = &receiver_trace_;
+    receiver_call_.reset(Call::Create(receiver_call_config));
+
+    Call::Config sender_call_config(&send_transport_);
+    sender_call_.reset(Call::Create(sender_call_config));
+
+    send_transport_.SetReceiver(receiver_call_->Receiver());
+    receive_transport_.SetReceiver(sender_call_->Receiver());
+
+    send_config_ = sender_call_->GetDefaultSendConfig();
+    send_config_.rtp.ssrcs.push_back(kSendSsrc);
+    // send_config_.encoder will be set by every stream separately.
+    send_config_.internal_source = false;
+    test::FakeEncoder::SetCodecSettings(&send_config_.codec, 1);
+    send_config_.codec.plType = kSendPayloadType;
+
+    receive_config_ = receiver_call_->GetDefaultReceiveConfig();
+    receive_config_.codecs.clear();
+    receive_config_.codecs.push_back(send_config_.codec);
+    // receive_config_.external_decoders will be set by every stream separately.
+    receive_config_.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
+    receive_config_.rtp.local_ssrc = kReceiverLocalSsrc;
+    receive_config_.rtp.extensions.push_back(
+        RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
+    receive_config_.rtp.extensions.push_back(
+        RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
+  }
+
+  virtual void TearDown() {
+    std::for_each(streams_.begin(), streams_.end(),
+                  std::mem_fun(&Stream::StopSending));
+
+    send_transport_.StopSending();
+    receive_transport_.StopSending();
+
+    while (!streams_.empty()) {
+      delete streams_.back();
+      streams_.pop_back();
+    }
+
+    // The TraceCallback instance MUST outlive Calls, destroy Calls explicitly.
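+    // (receiver_trace_ is declared as a fixture member further down, so it
+    // is still alive while both Calls are torn down here.)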
+    receiver_call_.reset();
+  }
+
+ protected:
+  friend class Stream;
+
+  class TraceObserver : public TraceCallback {
+   public:
+    TraceObserver()
+        : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+          received_log_lines_(),
+          expected_log_lines_(),
+          done_(EventWrapper::Create()) {
+    }
+
+    void PushExpectedLogLine(const std::string& expected_log_line) {
+      CriticalSectionScoped cs(crit_sect_.get());
+      expected_log_lines_.push_back(expected_log_line);
+    }
+
+    virtual void Print(TraceLevel level,
+                       const char* message,
+                       int length) OVERRIDE {
+      CriticalSectionScoped cs(crit_sect_.get());
+      if (!(level & kTraceStateInfo)) {
+        return;
+      }
+      std::string msg(message);
+      if (msg.find("BitrateEstimator") != std::string::npos) {
+        received_log_lines_.push_back(msg);
+      }
+      int num_popped = 0;
+      while (!received_log_lines_.empty() && !expected_log_lines_.empty()) {
+        std::string a = received_log_lines_.front();
+        std::string b = expected_log_lines_.front();
+        received_log_lines_.pop_front();
+        expected_log_lines_.pop_front();
+        num_popped++;
+        EXPECT_TRUE(a.find(b) != std::string::npos);
+      }
+      if (expected_log_lines_.size() <= 0) {
+        if (num_popped > 0) {
+          done_->Set();
+        }
+        return;
+      }
+    }
+
+    EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); }
+
+   private:
+    typedef std::list<std::string> Strings;
+    scoped_ptr<CriticalSectionWrapper> crit_sect_;
+    Strings received_log_lines_;
+    Strings expected_log_lines_;
+    scoped_ptr<EventWrapper> done_;
+  };
+
+  class Stream {
+   public:
+    explicit Stream(BitrateEstimatorTest* test)
+        : test_(test),
+          is_sending_receiving_(false),
+          send_stream_(NULL),
+          receive_stream_(NULL),
+          frame_generator_capturer_(),
+          fake_encoder_(Clock::GetRealTimeClock()),
+          fake_decoder_() {
+      test_->send_config_.rtp.ssrcs[0]++;
+      test_->send_config_.encoder = &fake_encoder_;
+      send_stream_ =
+          test_->sender_call_->CreateVideoSendStream(test_->send_config_);
+      frame_generator_capturer_.reset(
+          test::FrameGeneratorCapturer::Create(send_stream_->Input(),
+                                               test_->send_config_.codec.width,
+                                               test_->send_config_.codec.height,
+                                               30,
+                                               Clock::GetRealTimeClock()));
+      send_stream_->StartSending();
+      frame_generator_capturer_->Start();
+
+      ExternalVideoDecoder decoder;
+      decoder.decoder = &fake_decoder_;
+      decoder.payload_type = test_->send_config_.codec.plType;
+      test_->receive_config_.rtp.remote_ssrc = test_->send_config_.rtp.ssrcs[0];
+      test_->receive_config_.rtp.local_ssrc++;
+      test_->receive_config_.external_decoders.push_back(decoder);
+      receive_stream_ = test_->receiver_call_->CreateVideoReceiveStream(
+          test_->receive_config_);
+      receive_stream_->StartReceiving();
+
+      is_sending_receiving_ = true;
+    }
+
+    ~Stream() {
+      frame_generator_capturer_.reset(NULL);
+      test_->sender_call_->DestroyVideoSendStream(send_stream_);
+      send_stream_ = NULL;
+      test_->receiver_call_->DestroyVideoReceiveStream(receive_stream_);
+      receive_stream_ = NULL;
+    }
+
+    void StopSending() {
+      if (is_sending_receiving_) {
+        frame_generator_capturer_->Stop();
+        send_stream_->StopSending();
+        receive_stream_->StopReceiving();
+        is_sending_receiving_ = false;
+      }
+    }
+
+   private:
+    BitrateEstimatorTest* test_;
+    bool is_sending_receiving_;
+    VideoSendStream* send_stream_;
+    VideoReceiveStream* receive_stream_;
+    scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
+    test::FakeEncoder fake_encoder_;
+    test::FakeDecoder fake_decoder_;
+  };
+
+  TraceObserver receiver_trace_;
+  test::DirectTransport send_transport_;
+  test::DirectTransport receive_transport_;
+  scoped_ptr<Call> sender_call_;
+  scoped_ptr<Call> receiver_call_;
+  VideoSendStream::Config send_config_;
+  VideoReceiveStream::Config receive_config_;
+  std::vector<Stream*> streams_;
+};
+
+TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefault) {
+  send_config_.rtp.extensions.push_back(
+      RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+}
+
+TEST_F(BitrateEstimatorTest, ImmediatelySwitchToAST) {
+  send_config_.rtp.extensions.push_back(
+      RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
+  receiver_trace_.PushExpectedLogLine(
+      "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+}
+
+TEST_F(BitrateEstimatorTest, SwitchesToAST) {
+  send_config_.rtp.extensions.push_back(
+      RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+
+  send_config_.rtp.extensions[0] =
+      RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId);
+  receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
+  receiver_trace_.PushExpectedLogLine(
+      "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+}
+
+TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOF) {
+  send_config_.rtp.extensions.push_back(
+      RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+
+  send_config_.rtp.extensions[0] =
+      RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId);
+  receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
+  receiver_trace_.PushExpectedLogLine(
+      "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+
+  send_config_.rtp.extensions[0] =
+      RtpExtension(RtpExtension::kTOffset, kTOFExtensionId);
+  receiver_trace_.PushExpectedLogLine(
+      "WrappingBitrateEstimator: Switching to transmission time offset RBE.");
+  receiver_trace_.PushExpectedLogLine(
+      "RemoteBitrateEstimatorFactory: Instantiating.");
+  streams_.push_back(new Stream(this));
+  streams_[0]->StopSending();
+  streams_[1]->StopSending();
+  EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+}
+} // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video/call.cc b/media/webrtc/trunk/webrtc/video/call.cc
new file mode 100644
index 000000000000..7c4699e44dfa
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/call.cc
@@ -0,0 +1,427 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <string.h>
+
+#include <map>
+#include <vector>
+
+#include "webrtc/call.h"
+#include "webrtc/common.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/video/video_receive_stream.h"
+#include "webrtc/video/video_send_stream.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+
+namespace webrtc {
+const char* RtpExtension::kTOffset = "urn:ietf:params:rtp-hdrext:toffset";
+const char* RtpExtension::kAbsSendTime =
+    "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time";
+namespace internal {
+
+class CpuOveruseObserverProxy : public webrtc::CpuOveruseObserver {
+ public:
+  CpuOveruseObserverProxy(OveruseCallback* overuse_callback)
+      : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+        overuse_callback_(overuse_callback) {
+    assert(overuse_callback != NULL);
+  }
+
+  virtual ~CpuOveruseObserverProxy() {}
+
+  virtual void OveruseDetected() OVERRIDE {
+    CriticalSectionScoped cs(crit_.get());
+    overuse_callback_->OnOveruse();
+  }
+
+  virtual void NormalUsage() OVERRIDE {
+    CriticalSectionScoped cs(crit_.get());
+    overuse_callback_->OnNormalUse();
+  }
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> crit_;
+  OveruseCallback* overuse_callback_;
+};
+
+class Call : public webrtc::Call, public PacketReceiver {
+ public:
+  Call(webrtc::VideoEngine* video_engine, const Call::Config& config);
+  virtual ~Call();
+
+  virtual PacketReceiver* Receiver() OVERRIDE;
+  virtual std::vector<VideoCodec> GetVideoCodecs() OVERRIDE;
+
+  virtual VideoSendStream::Config GetDefaultSendConfig() OVERRIDE;
+
+  virtual VideoSendStream* CreateVideoSendStream(
+      const VideoSendStream::Config& config) OVERRIDE;
+
+  virtual void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream)
+      OVERRIDE;
+
+  virtual VideoReceiveStream::Config GetDefaultReceiveConfig() OVERRIDE;
+
+  virtual VideoReceiveStream* CreateVideoReceiveStream(
+      const VideoReceiveStream::Config& config) OVERRIDE;
+
+  virtual void DestroyVideoReceiveStream(
+      webrtc::VideoReceiveStream* receive_stream) OVERRIDE;
+
+  virtual uint32_t SendBitrateEstimate() OVERRIDE;
+  virtual uint32_t ReceiveBitrateEstimate() OVERRIDE;
+
+  virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE;
+
+ private:
+  bool DeliverRtcp(const uint8_t* packet, size_t length);
+  bool DeliverRtp(const RTPHeader& header,
+                  const uint8_t* packet,
+                  size_t length);
+
+  Call::Config config_;
+
+  std::map<uint32_t, VideoReceiveStream*> receive_ssrcs_;
+  scoped_ptr<RWLockWrapper> receive_lock_;
+
+  std::map<uint32_t, VideoSendStream*> send_ssrcs_;
+  scoped_ptr<RWLockWrapper> send_lock_;
+
+  scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+
+  scoped_ptr<CpuOveruseObserverProxy> overuse_observer_proxy_;
+
+  VideoEngine* video_engine_;
+  ViERTP_RTCP* rtp_rtcp_;
+  ViECodec* codec_;
+  ViEBase* base_;
+  int base_channel_id_;
+
+  DISALLOW_COPY_AND_ASSIGN(Call);
+};
+} // namespace internal
+
+class TraceDispatcher : public TraceCallback {
+ public:
+  TraceDispatcher()
+      : lock_(CriticalSectionWrapper::CreateCriticalSection()),
+        filter_(kTraceNone) {
+    Trace::CreateTrace();
+    VideoEngine::SetTraceCallback(this);
+    VideoEngine::SetTraceFilter(kTraceNone);
+  }
+
+  ~TraceDispatcher() {
+    Trace::ReturnTrace();
+    VideoEngine::SetTraceCallback(NULL);
+  }
+
+  virtual void Print(TraceLevel level,
+                     const char* message,
+                     int length) OVERRIDE {
+    CriticalSectionScoped crit(lock_.get());
+    for (std::map<Call*, Call::Config*>::iterator it = callbacks_.begin();
+         it != callbacks_.end();
+         ++it) {
+      if ((level & it->second->trace_filter) != kTraceNone)
+        it->second->trace_callback->Print(level, message, length);
+    }
+  }
+
+  void RegisterCallback(Call* call, Call::Config* config) {
+    if (config->trace_callback == NULL)
+      return;
+
+    CriticalSectionScoped crit(lock_.get());
+    callbacks_[call] = config;
+
+    filter_ |= config->trace_filter;
+    VideoEngine::SetTraceFilter(filter_);
+  }
+
+  void DeregisterCallback(Call* call) {
+    CriticalSectionScoped crit(lock_.get());
+    callbacks_.erase(call);
+
+    filter_ = kTraceNone;
+    for (std::map<Call*, Call::Config*>::iterator it = callbacks_.begin();
+         it != callbacks_.end();
+         ++it) {
+      filter_ |= it->second->trace_filter;
+    }
+
+    VideoEngine::SetTraceFilter(filter_);
+  }
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> lock_;
+  unsigned int filter_;
+  std::map<Call*, Call::Config*> callbacks_;
+};
+
+namespace internal {
+TraceDispatcher* global_trace_dispatcher = NULL;
+} // internal
+
+void CreateTraceDispatcher() {
+  if (internal::global_trace_dispatcher == NULL) {
+    TraceDispatcher* dispatcher = new TraceDispatcher();
+    // TODO(pbos): Atomic compare and exchange.
+    if (internal::global_trace_dispatcher == NULL) {
+      internal::global_trace_dispatcher = dispatcher;
+    } else {
+      delete dispatcher;
+    }
+  }
+}
+
+Call* Call::Create(const Call::Config& config) {
+  CreateTraceDispatcher();
+
+  VideoEngine* video_engine = config.webrtc_config != NULL
+                                  ? VideoEngine::Create(*config.webrtc_config)
+                                  : VideoEngine::Create();
+  assert(video_engine != NULL);
+
+  return new internal::Call(video_engine, config);
+}
+
+namespace internal {
+
+Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
+    : config_(config),
+      receive_lock_(RWLockWrapper::CreateRWLock()),
+      send_lock_(RWLockWrapper::CreateRWLock()),
+      rtp_header_parser_(RtpHeaderParser::Create()),
+      video_engine_(video_engine),
+      base_channel_id_(-1) {
+  assert(video_engine != NULL);
+  assert(config.send_transport != NULL);
+
+  if (config.overuse_callback) {
+    overuse_observer_proxy_.reset(
+        new CpuOveruseObserverProxy(config.overuse_callback));
+  }
+
+  global_trace_dispatcher->RegisterCallback(this, &config_);
+
+  rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine_);
+  assert(rtp_rtcp_ != NULL);
+
+  codec_ = ViECodec::GetInterface(video_engine_);
+  assert(codec_ != NULL);
+
+  // As a workaround for non-existing calls in the old API, create a base
+  // channel used as default channel when creating send and receive streams.
+  base_ = ViEBase::GetInterface(video_engine_);
+  assert(base_ != NULL);
+
+  base_->CreateChannel(base_channel_id_);
+  assert(base_channel_id_ != -1);
+}
+
+Call::~Call() {
+  global_trace_dispatcher->DeregisterCallback(this);
+  base_->DeleteChannel(base_channel_id_);
+  base_->Release();
+  codec_->Release();
+  rtp_rtcp_->Release();
+  webrtc::VideoEngine::Delete(video_engine_);
+}
+
+PacketReceiver* Call::Receiver() { return this; }
+
+std::vector<VideoCodec> Call::GetVideoCodecs() {
+  std::vector<VideoCodec> codecs;
+
+  VideoCodec codec;
+  for (size_t i = 0; i < static_cast<size_t>(codec_->NumberOfCodecs()); ++i) {
+    if (codec_->GetCodec(static_cast<unsigned char>(i), codec) == 0) {
+      codecs.push_back(codec);
+    }
+  }
+  return codecs;
+}
+
+VideoSendStream::Config Call::GetDefaultSendConfig() {
+  VideoSendStream::Config config;
+  codec_->GetCodec(0, config.codec);
+  return config;
+}
+
+VideoSendStream* Call::CreateVideoSendStream(
+    const VideoSendStream::Config& config) {
+  assert(config.rtp.ssrcs.size() > 0);
+  assert(config.rtp.ssrcs.size() >= config.codec.numberOfSimulcastStreams);
+
+  VideoSendStream* send_stream = new VideoSendStream(
+      config_.send_transport,
+      overuse_observer_proxy_.get(),
+      video_engine_,
+      config,
+      base_channel_id_);
+
+  WriteLockScoped write_lock(*send_lock_);
+  for (size_t i = 0; i < config.rtp.ssrcs.size(); ++i) {
+    assert(send_ssrcs_.find(config.rtp.ssrcs[i]) == send_ssrcs_.end());
+    send_ssrcs_[config.rtp.ssrcs[i]] = send_stream;
+  }
+  return send_stream;
+}
+
+void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
+  assert(send_stream != NULL);
+
+  VideoSendStream* send_stream_impl = NULL;
+  {
+    WriteLockScoped write_lock(*send_lock_);
+    for (std::map<uint32_t, VideoSendStream*>::iterator it =
+             send_ssrcs_.begin();
+         it != send_ssrcs_.end();
+         ++it) {
+      if (it->second == static_cast<VideoSendStream*>(send_stream)) {
+        send_stream_impl = it->second;
+        send_ssrcs_.erase(it);
+        break;
+      }
+    }
+  }
+
+  assert(send_stream_impl != NULL);
+  delete send_stream_impl;
+}
+
+VideoReceiveStream::Config Call::GetDefaultReceiveConfig() {
+  VideoReceiveStream::Config config;
+  config.rtp.remb = true;
+  return config;
+}
+
+VideoReceiveStream* Call::CreateVideoReceiveStream(
+    const VideoReceiveStream::Config& config) {
+  VideoReceiveStream* receive_stream =
+      new VideoReceiveStream(video_engine_,
+                             config,
+                             config_.send_transport,
+                             config_.voice_engine,
+                             base_channel_id_);
+
+  WriteLockScoped write_lock(*receive_lock_);
+  assert(receive_ssrcs_.find(config.rtp.remote_ssrc) == receive_ssrcs_.end());
+  receive_ssrcs_[config.rtp.remote_ssrc] = receive_stream;
+  // TODO(pbos): Configure different RTX payloads per receive payload.
+  VideoReceiveStream::Config::Rtp::RtxMap::const_iterator it =
+      config.rtp.rtx.begin();
+  if (it != config.rtp.rtx.end())
+    receive_ssrcs_[it->second.ssrc] = receive_stream;
+
+  return receive_stream;
+}
+
+void Call::DestroyVideoReceiveStream(
+    webrtc::VideoReceiveStream* receive_stream) {
+  assert(receive_stream != NULL);
+
+  VideoReceiveStream* receive_stream_impl = NULL;
+  {
+    WriteLockScoped write_lock(*receive_lock_);
+    // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
+    // separate SSRC there can be either one or two.
+    std::map<uint32_t, VideoReceiveStream*>::iterator it =
+        receive_ssrcs_.begin();
+    while (it != receive_ssrcs_.end()) {
+      if (it->second == static_cast<VideoReceiveStream*>(receive_stream)) {
+        assert(receive_stream_impl == NULL ||
+               receive_stream_impl == it->second);
+        receive_stream_impl = it->second;
+        receive_ssrcs_.erase(it++);
+      } else {
+        ++it;
+      }
+    }
+  }
+
+  assert(receive_stream_impl != NULL);
+  delete receive_stream_impl;
+}
+
+uint32_t Call::SendBitrateEstimate() {
+  // TODO(pbos): Return send-bitrate estimate
+  return 0;
+}
+
+uint32_t Call::ReceiveBitrateEstimate() {
+  // TODO(pbos): Return receive-bitrate estimate
+  return 0;
+}
+
+bool Call::DeliverRtcp(const uint8_t* packet, size_t length) {
+  // TODO(pbos): Figure out what channel needs it actually.
+  //             Do NOT broadcast! Also make sure it's a valid packet.
+  bool rtcp_delivered = false;
+  {
+    ReadLockScoped read_lock(*receive_lock_);
+    for (std::map<uint32_t, VideoReceiveStream*>::iterator it =
+             receive_ssrcs_.begin();
+         it != receive_ssrcs_.end();
+         ++it) {
+      if (it->second->DeliverRtcp(packet, length))
+        rtcp_delivered = true;
+    }
+  }
+
+  {
+    ReadLockScoped read_lock(*send_lock_);
+    for (std::map<uint32_t, VideoSendStream*>::iterator it =
+             send_ssrcs_.begin();
+         it != send_ssrcs_.end();
+         ++it) {
+      if (it->second->DeliverRtcp(packet, length))
+        rtcp_delivered = true;
+    }
+  }
+  return rtcp_delivered;
+}
+
+bool Call::DeliverRtp(const RTPHeader& header,
+                      const uint8_t* packet,
+                      size_t length) {
+  ReadLockScoped read_lock(*receive_lock_);
+  std::map<uint32_t, VideoReceiveStream*>::iterator it =
+      receive_ssrcs_.find(header.ssrc);
+  if (it == receive_ssrcs_.end()) {
+    // TODO(pbos): Log some warning, SSRC without receiver.
+    return false;
+  }
+  return it->second->DeliverRtp(static_cast<const uint8_t*>(packet), length);
+}
+
+bool Call::DeliverPacket(const uint8_t* packet, size_t length) {
+  // TODO(pbos): ExtensionMap if there are extensions.
+  if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)))
+    return DeliverRtcp(packet, length);
+
+  RTPHeader rtp_header;
+  if (!rtp_header_parser_->Parse(packet, static_cast<int>(length), &rtp_header))
+    return false;
+
+  return DeliverRtp(rtp_header, packet, length);
+}
+
+} // namespace internal
+} // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video/call_perf_tests.cc b/media/webrtc/trunk/webrtc/video/call_perf_tests.cc
new file mode 100644
index 000000000000..4766ff81a12e
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/call_perf_tests.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <stdlib.h>
+
+#include <algorithm>
+#include <sstream>
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/fake_audio_device.h"
+#include "webrtc/test/fake_decoder.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/test/frame_generator.h"
+#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/rtp_rtcp_observer.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/perf_test.h"
+#include "webrtc/video/transport_adapter.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+#include "webrtc/voice_engine/include/voe_codec.h"
+#include "webrtc/voice_engine/include/voe_network.h"
+#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+static unsigned int kLongTimeoutMs = 120 * 1000;
+static const uint32_t kSendSsrc = 0x654321;
+static const uint32_t kReceiverLocalSsrc = 0x123456;
+static const uint8_t kSendPayloadType = 125;
+
+class CallPerfTest : public ::testing::Test {
+ public:
+  CallPerfTest()
+      : send_stream_(NULL), fake_encoder_(Clock::GetRealTimeClock()) {}
+ protected:
+  VideoSendStream::Config GetSendTestConfig(Call* call) {
+    VideoSendStream::Config config = call->GetDefaultSendConfig();
+    config.encoder = &fake_encoder_;
+    config.internal_source = false;
+    config.rtp.ssrcs.push_back(kSendSsrc);
+    test::FakeEncoder::SetCodecSettings(&config.codec, 1);
+    config.codec.plType = kSendPayloadType;
+    return config;
+  }
+  void RunVideoSendTest(Call* call,
+                        const VideoSendStream::Config& config,
+                        test::RtpRtcpObserver* observer) {
+    send_stream_ = call->CreateVideoSendStream(config);
+    scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+        test::FrameGeneratorCapturer::Create(
+            send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+    send_stream_->StartSending();
+    frame_generator_capturer->Start();
+
+    EXPECT_EQ(kEventSignaled, observer->Wait());
+
+    observer->StopSending();
+    frame_generator_capturer->Stop();
+    send_stream_->StopSending();
+    call->DestroyVideoSendStream(send_stream_);
+  }
+
+  VideoSendStream* send_stream_;
+  test::FakeEncoder fake_encoder_;
+};
+
+class SyncRtcpObserver : public test::RtpRtcpObserver {
+ public:
+  explicit SyncRtcpObserver(const FakeNetworkPipe::Config& config)
+      : test::RtpRtcpObserver(kLongTimeoutMs, config),
+        critical_section_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+  virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+    RTCPUtility::RTCPParserV2 parser(packet, length, true);
+    EXPECT_TRUE(parser.IsValid());
+
+    for (RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
+         packet_type != RTCPUtility::kRtcpNotValidCode;
+         packet_type = parser.Iterate()) {
+      if (packet_type == RTCPUtility::kRtcpSrCode) {
+        const RTCPUtility::RTCPPacket& packet = parser.Packet();
+        synchronization::RtcpMeasurement ntp_rtp_pair(
+            packet.SR.NTPMostSignificant,
+            packet.SR.NTPLeastSignificant,
+            packet.SR.RTPTimestamp);
+        StoreNtpRtpPair(ntp_rtp_pair);
+      }
+    }
+    return SEND_PACKET;
+  }
+
+  int64_t RtpTimestampToNtp(uint32_t timestamp) const {
+    CriticalSectionScoped cs(critical_section_.get());
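+    // (critical_section_ guards ntp_rtp_pairs_, which OnSendRtcp() fills in
+    // from the transport thread via StoreNtpRtpPair().)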
+    int64_t timestamp_in_ms = -1;
+    if (ntp_rtp_pairs_.size() == 2) {
+      // TODO(stefan): We can't EXPECT_TRUE on this call due to a bug in the
+      // RTCP sender where it sends RTCP SR before any RTP packets, which leads
+      // to a bogus NTP/RTP mapping.
+      synchronization::RtpToNtpMs(timestamp, ntp_rtp_pairs_, &timestamp_in_ms);
+      return timestamp_in_ms;
+    }
+    return -1;
+  }
+
+ private:
+  void StoreNtpRtpPair(synchronization::RtcpMeasurement ntp_rtp_pair) {
+    CriticalSectionScoped cs(critical_section_.get());
+    for (synchronization::RtcpList::iterator it = ntp_rtp_pairs_.begin();
+         it != ntp_rtp_pairs_.end();
+         ++it) {
+      if (ntp_rtp_pair.ntp_secs == it->ntp_secs &&
+          ntp_rtp_pair.ntp_frac == it->ntp_frac) {
+        // This RTCP has already been added to the list.
+        return;
+      }
+    }
+    // We need two RTCP SR reports to map between RTP and NTP. More than two
+    // will not improve the mapping.
+    if (ntp_rtp_pairs_.size() == 2) {
+      ntp_rtp_pairs_.pop_back();
+    }
+    ntp_rtp_pairs_.push_front(ntp_rtp_pair);
+  }
+
+  scoped_ptr<CriticalSectionWrapper> critical_section_;
+  synchronization::RtcpList ntp_rtp_pairs_;
+};
+
+class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
+  static const int kInSyncThresholdMs = 50;
+  static const int kStartupTimeMs = 2000;
+  static const int kMinRunTimeMs = 30000;
+
+ public:
+  VideoRtcpAndSyncObserver(Clock* clock,
+                           int voe_channel,
+                           VoEVideoSync* voe_sync,
+                           SyncRtcpObserver* audio_observer)
+      : SyncRtcpObserver(FakeNetworkPipe::Config()),
+        clock_(clock),
+        voe_channel_(voe_channel),
+        voe_sync_(voe_sync),
+        audio_observer_(audio_observer),
+        creation_time_ms_(clock_->TimeInMilliseconds()),
+        first_time_in_sync_(-1) {}
+
+  virtual void RenderFrame(const I420VideoFrame& video_frame,
+                           int time_to_render_ms) OVERRIDE {
+    int64_t now_ms = clock_->TimeInMilliseconds();
+    uint32_t playout_timestamp = 0;
+    if (voe_sync_->GetPlayoutTimestamp(voe_channel_, playout_timestamp) != 0)
+      return;
+    int64_t latest_audio_ntp =
+        audio_observer_->RtpTimestampToNtp(playout_timestamp);
+    int64_t latest_video_ntp = RtpTimestampToNtp(video_frame.timestamp());
+    if (latest_audio_ntp < 0 || latest_video_ntp < 0)
+      return;
+    int time_until_render_ms =
+        std::max(0, static_cast<int>(video_frame.render_time_ms() - now_ms));
+    latest_video_ntp += time_until_render_ms;
+    int64_t stream_offset = latest_audio_ntp - latest_video_ntp;
+    std::stringstream ss;
+    ss << stream_offset;
+    webrtc::test::PrintResult(
+        "stream_offset", "", "synchronization", ss.str(), "ms", false);
+    int64_t time_since_creation = now_ms - creation_time_ms_;
+    // During the first couple of seconds audio and video can falsely be
+    // estimated as being synchronized. We don't want to trigger on those.
+    if (time_since_creation < kStartupTimeMs)
+      return;
+    if (abs(latest_audio_ntp - latest_video_ntp) < kInSyncThresholdMs) {
+      if (first_time_in_sync_ == -1) {
+        first_time_in_sync_ = now_ms;
+        webrtc::test::PrintResult("sync_convergence_time",
+                                  "",
+                                  "synchronization",
+                                  time_since_creation,
+                                  "ms",
+                                  false);
+      }
+      if (time_since_creation > kMinRunTimeMs)
+        observation_complete_->Set();
+    }
+  }
+
+ private:
+  Clock* clock_;
+  int voe_channel_;
+  VoEVideoSync* voe_sync_;
+  SyncRtcpObserver* audio_observer_;
+  int64_t creation_time_ms_;
+  int64_t first_time_in_sync_;
+};
+
+TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSync) {
+  VoiceEngine* voice_engine = VoiceEngine::Create();
+  VoEBase* voe_base = VoEBase::GetInterface(voice_engine);
+  VoECodec* voe_codec = VoECodec::GetInterface(voice_engine);
+  VoENetwork* voe_network = VoENetwork::GetInterface(voice_engine);
+  VoEVideoSync* voe_sync = VoEVideoSync::GetInterface(voice_engine);
+  const std::string audio_filename =
+      test::ResourcePath("voice_engine/audio_long16", "pcm");
+  ASSERT_STRNE("", audio_filename.c_str());
+  test::FakeAudioDevice fake_audio_device(Clock::GetRealTimeClock(),
+                                          audio_filename);
+  EXPECT_EQ(0, voe_base->Init(&fake_audio_device, NULL));
+  int channel = voe_base->CreateChannel();
+
+  FakeNetworkPipe::Config net_config;
+  net_config.queue_delay_ms = 500;
+  SyncRtcpObserver audio_observer(net_config);
+  VideoRtcpAndSyncObserver observer(
+      Clock::GetRealTimeClock(), channel, voe_sync, &audio_observer);
+
+  Call::Config receiver_config(observer.ReceiveTransport());
+  receiver_config.voice_engine = voice_engine;
+  scoped_ptr<Call> sender_call(
+      Call::Create(Call::Config(observer.SendTransport())));
+  scoped_ptr<Call> receiver_call(Call::Create(receiver_config));
+  CodecInst isac = {103, "ISAC", 16000, 480, 1, 32000};
+  EXPECT_EQ(0, voe_codec->SetSendCodec(channel, isac));
+
+  class VoicePacketReceiver : public PacketReceiver {
+   public:
+    VoicePacketReceiver(int channel, VoENetwork* voe_network)
+        : channel_(channel),
+          voe_network_(voe_network),
+          parser_(RtpHeaderParser::Create()) {}
+    virtual bool DeliverPacket(const uint8_t* packet, size_t length) {
+      int ret;
+      if (parser_->IsRtcp(packet, static_cast<int>(length))) {
+        ret = voe_network_->ReceivedRTCPPacket(
+            channel_, packet, static_cast<unsigned int>(length));
+      } else {
+        ret = voe_network_->ReceivedRTPPacket(
+            channel_, packet, static_cast<unsigned int>(length));
+      }
+      return ret == 0;
+    }
+
+   private:
+    int channel_;
+    VoENetwork* voe_network_;
+    scoped_ptr<RtpHeaderParser> parser_;
+  } voe_packet_receiver(channel, voe_network);
+
+  audio_observer.SetReceivers(&voe_packet_receiver, &voe_packet_receiver);
+
+  internal::TransportAdapter transport_adapter(audio_observer.SendTransport());
+  transport_adapter.Enable();
+  EXPECT_EQ(0,
+            voe_network->RegisterExternalTransport(channel, transport_adapter));
+
+  observer.SetReceivers(receiver_call->Receiver(), sender_call->Receiver());
+
+  test::FakeDecoder fake_decoder;
+
+  VideoSendStream::Config send_config = GetSendTestConfig(sender_call.get());
+
+  VideoReceiveStream::Config receive_config =
+      receiver_call->GetDefaultReceiveConfig();
+  receive_config.codecs.clear();
+  receive_config.codecs.push_back(send_config.codec);
+  ExternalVideoDecoder decoder;
+  decoder.decoder = &fake_decoder;
+  decoder.payload_type = send_config.codec.plType;
+  receive_config.external_decoders.push_back(decoder);
+  receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0];
+  receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
+  receive_config.renderer = &observer;
+  receive_config.audio_channel_id = channel;
+
+  VideoSendStream* send_stream =
+      sender_call->CreateVideoSendStream(send_config);
+  VideoReceiveStream* receive_stream =
+      receiver_call->CreateVideoReceiveStream(receive_config);
+  scoped_ptr<test::FrameGeneratorCapturer> capturer(
+      test::FrameGeneratorCapturer::Create(send_stream->Input(),
+                                           send_config.codec.width,
+                                           send_config.codec.height,
+                                           30,
+                                           Clock::GetRealTimeClock()));
+  receive_stream->StartReceiving();
+  send_stream->StartSending();
+  capturer->Start();
+
+  fake_audio_device.Start();
+  EXPECT_EQ(0, voe_base->StartPlayout(channel));
+  EXPECT_EQ(0, voe_base->StartReceive(channel));
+  EXPECT_EQ(0, voe_base->StartSend(channel));
+
+  EXPECT_EQ(kEventSignaled, observer.Wait())
+      << "Timed out while waiting for audio and video to be synchronized.";
+
+  EXPECT_EQ(0, voe_base->StopSend(channel));
+  EXPECT_EQ(0, voe_base->StopReceive(channel));
+  EXPECT_EQ(0, voe_base->StopPlayout(channel));
+  fake_audio_device.Stop();
+
+  capturer->Stop();
+  send_stream->StopSending();
+  receive_stream->StopReceiving();
+  observer.StopSending();
+  audio_observer.StopSending();
+
+  voe_base->DeleteChannel(channel);
+  voe_base->Release();
+  voe_codec->Release();
+  voe_network->Release();
+  voe_sync->Release();
+  sender_call->DestroyVideoSendStream(send_stream);
+  receiver_call->DestroyVideoReceiveStream(receive_stream);
+  VoiceEngine::Delete(voice_engine);
+}
+
+TEST_F(CallPerfTest, RegisterCpuOveruseObserver) {
+  // Verifies that either a normal or overuse callback is triggered.
+  class OveruseCallbackObserver : public test::RtpRtcpObserver,
+                                  public webrtc::OveruseCallback {
+   public:
+    OveruseCallbackObserver() : RtpRtcpObserver(kLongTimeoutMs) {}
+
+    virtual void OnOveruse() OVERRIDE {
+      observation_complete_->Set();
+    }
+    virtual void OnNormalUse() OVERRIDE {
+      observation_complete_->Set();
+    }
+  };
+
+  OveruseCallbackObserver observer;
+  Call::Config call_config(observer.SendTransport());
+  call_config.overuse_callback = &observer;
+  scoped_ptr<Call> call(Call::Create(call_config));
+
+  VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+  RunVideoSendTest(call.get(), send_config, &observer);
+}
+} // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video/call_tests.cc b/media/webrtc/trunk/webrtc/video/call_tests.cc
new file mode 100644
index 000000000000..a945f64ceaaa
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/call_tests.cc
@@ -0,0 +1,1478 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <assert.h>
+
+#include <map>
+#include <set>
+#include <sstream>
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/fake_audio_device.h"
+#include "webrtc/test/fake_decoder.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/test/frame_generator.h"
+#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/null_transport.h"
+#include "webrtc/test/rtp_rtcp_observer.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/perf_test.h"
+#include "webrtc/video/transport_adapter.h"
+
+namespace webrtc {
+
+static unsigned int kDefaultTimeoutMs = 30 * 1000;
+static unsigned int kLongTimeoutMs = 120 * 1000;
+static const uint32_t kSendSsrc = 0x654321;
+static const uint32_t kSendRtxSsrc = 0x424242;
+static const uint32_t kReceiverLocalSsrc = 0x123456;
+static const uint8_t kSendPayloadType = 125;
+static const uint8_t kSendRtxPayloadType = 126;
+
+class CallTest : public ::testing::Test {
+ public:
+  CallTest()
+      : send_stream_(NULL),
+        receive_stream_(NULL),
+        fake_encoder_(Clock::GetRealTimeClock()) {}
+
+  virtual ~CallTest() {
+    EXPECT_EQ(NULL, send_stream_);
+    EXPECT_EQ(NULL, receive_stream_);
+  }
+
+ protected:
+  void CreateCalls(const Call::Config& sender_config,
+                   const Call::Config& receiver_config) {
+    sender_call_.reset(Call::Create(sender_config));
+    receiver_call_.reset(Call::Create(receiver_config));
+  }
+
+  void CreateTestConfigs() {
+    send_config_ = sender_call_->GetDefaultSendConfig();
+    receive_config_ = receiver_call_->GetDefaultReceiveConfig();
+
+    send_config_.rtp.ssrcs.push_back(kSendSsrc);
+    send_config_.encoder = &fake_encoder_;
+    send_config_.internal_source = false;
+    test::FakeEncoder::SetCodecSettings(&send_config_.codec, 1);
+    send_config_.codec.plType = kSendPayloadType;
+
+    receive_config_.codecs.clear();
+    receive_config_.codecs.push_back(send_config_.codec);
+    ExternalVideoDecoder decoder;
+    decoder.decoder = &fake_decoder_;
+    decoder.payload_type = send_config_.codec.plType;
+    receive_config_.external_decoders.push_back(decoder);
+    receive_config_.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
+    receive_config_.rtp.local_ssrc = kReceiverLocalSsrc;
+  }
+
+  void CreateStreams() {
+    assert(send_stream_ == NULL);
+    assert(receive_stream_ == NULL);
+
+    send_stream_ = sender_call_->CreateVideoSendStream(send_config_);
+    receive_stream_ = receiver_call_->CreateVideoReceiveStream(receive_config_);
+  }
+
+  void CreateFrameGenerator() {
+    frame_generator_capturer_.reset(
+        test::FrameGeneratorCapturer::Create(send_stream_->Input(),
+                                             send_config_.codec.width,
+                                             send_config_.codec.height,
+                                             30,
+                                             Clock::GetRealTimeClock()));
+  }
+
+  void StartSending() {
+    receive_stream_->StartReceiving();
+    send_stream_->StartSending();
+    if (frame_generator_capturer_.get() != NULL)
+      frame_generator_capturer_->Start();
+  }
+
+  void StopSending() {
+    if (frame_generator_capturer_.get() != NULL)
+      frame_generator_capturer_->Stop();
+    if (send_stream_ != NULL)
+      send_stream_->StopSending();
+    if (receive_stream_ != NULL)
+      receive_stream_->StopReceiving();
+  }
+
+  void DestroyStreams() {
+    if (send_stream_ != NULL)
+      sender_call_->DestroyVideoSendStream(send_stream_);
+    if (receive_stream_ != NULL)
+      receiver_call_->DestroyVideoReceiveStream(receive_stream_);
+    send_stream_ = NULL;
+    receive_stream_ = NULL;
+  }
+
+  void DecodesRetransmittedFrame(bool retransmit_over_rtx);
+  void ReceivesPliAndRecovers(int rtp_history_ms);
+  void RespectsRtcpMode(newapi::RtcpMode rtcp_mode);
+  void TestXrReceiverReferenceTimeReport(bool enable_rrtr);
+
+  scoped_ptr<Call> sender_call_;
+  scoped_ptr<Call> receiver_call_;
+
+  VideoSendStream::Config send_config_;
+  VideoReceiveStream::Config receive_config_;
+
+  VideoSendStream* send_stream_;
+  VideoReceiveStream* receive_stream_;
+
+  scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
+
+  test::FakeEncoder fake_encoder_;
+  test::FakeDecoder fake_decoder_;
+};
+
+class NackObserver : public test::RtpRtcpObserver {
+  static const int kNumberOfNacksToObserve = 2;
+  static const int kLossBurstSize = 2;
+  static const int kPacketsBetweenLossBursts = 9;
+
+ public:
+  NackObserver()
+      : test::RtpRtcpObserver(kLongTimeoutMs),
+        rtp_parser_(RtpHeaderParser::Create()),
+        sent_rtp_packets_(0),
+        packets_left_to_drop_(0),
+        nacks_left_(kNumberOfNacksToObserve) {}
+
+ private:
+  virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+    RTPHeader header;
+    EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
+
+    // Never drop retransmitted packets.
+    if (dropped_packets_.find(header.sequenceNumber) !=
+        dropped_packets_.end()) {
+      retransmitted_packets_.insert(header.sequenceNumber);
+      if (nacks_left_ == 0 &&
+          retransmitted_packets_.size() == dropped_packets_.size()) {
+        observation_complete_->Set();
+      }
+      return SEND_PACKET;
+    }
+
+    ++sent_rtp_packets_;
+
+    // Enough NACKs received, stop dropping packets.
+    if (nacks_left_ == 0)
+      return SEND_PACKET;
+
+    // Check if it's time for a new loss burst.
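+    // (Every kPacketsBetweenLossBursts-th packet starts a burst of
+    // kLossBurstSize consecutive drops; the receiver is expected to NACK the
+    // missing sequence numbers.)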
+ if (sent_rtp_packets_ % kPacketsBetweenLossBursts == 0) + packets_left_to_drop_ = kLossBurstSize; + + if (packets_left_to_drop_ > 0) { + --packets_left_to_drop_; + dropped_packets_.insert(header.sequenceNumber); + return DROP_PACKET; + } + + return SEND_PACKET; + } + + virtual Action OnReceiveRtcp(const uint8_t* packet, size_t length) OVERRIDE { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + while (packet_type != RTCPUtility::kRtcpNotValidCode) { + if (packet_type == RTCPUtility::kRtcpRtpfbNackCode) { + --nacks_left_; + break; + } + packet_type = parser.Iterate(); + } + return SEND_PACKET; + } + + private: + scoped_ptr rtp_parser_; + std::set dropped_packets_; + std::set retransmitted_packets_; + uint64_t sent_rtp_packets_; + int packets_left_to_drop_; + int nacks_left_; +}; + +TEST_F(CallTest, UsesTraceCallback) { + const unsigned int kSenderTraceFilter = kTraceDebug; + const unsigned int kReceiverTraceFilter = kTraceDefault & (~kTraceDebug); + class TraceObserver : public TraceCallback { + public: + explicit TraceObserver(unsigned int filter) + : filter_(filter), messages_left_(50), done_(EventWrapper::Create()) {} + + virtual void Print(TraceLevel level, + const char* message, + int length) OVERRIDE { + EXPECT_EQ(0u, level & (~filter_)); + if (--messages_left_ == 0) + done_->Set(); + } + + EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); } + + private: + unsigned int filter_; + unsigned int messages_left_; + scoped_ptr done_; + } sender_trace(kSenderTraceFilter), receiver_trace(kReceiverTraceFilter); + + test::DirectTransport send_transport, receive_transport; + Call::Config sender_call_config(&send_transport); + sender_call_config.trace_callback = &sender_trace; + sender_call_config.trace_filter = kSenderTraceFilter; + Call::Config receiver_call_config(&receive_transport); + receiver_call_config.trace_callback = &receiver_trace; + receiver_call_config.trace_filter = kReceiverTraceFilter; + CreateCalls(sender_call_config, receiver_call_config); + send_transport.SetReceiver(receiver_call_->Receiver()); + receive_transport.SetReceiver(sender_call_->Receiver()); + + CreateTestConfigs(); + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + // Wait() waits for a couple of trace callbacks to occur. + EXPECT_EQ(kEventSignaled, sender_trace.Wait()); + EXPECT_EQ(kEventSignaled, receiver_trace.Wait()); + + StopSending(); + send_transport.StopSending(); + receive_transport.StopSending(); + DestroyStreams(); + + // The TraceCallback instance MUST outlive Calls, destroy Calls explicitly. + sender_call_.reset(); + receiver_call_.reset(); +} + +TEST_F(CallTest, ReceiverCanBeStartedTwice) { + test::NullTransport transport; + CreateCalls(Call::Config(&transport), Call::Config(&transport)); + + CreateTestConfigs(); + CreateStreams(); + + receive_stream_->StartReceiving(); + receive_stream_->StartReceiving(); + + DestroyStreams(); +} + +TEST_F(CallTest, ReceiverCanBeStoppedTwice) { + test::NullTransport transport; + CreateCalls(Call::Config(&transport), Call::Config(&transport)); + + CreateTestConfigs(); + CreateStreams(); + + receive_stream_->StopReceiving(); + receive_stream_->StopReceiving(); + + DestroyStreams(); +} + +TEST_F(CallTest, RendersSingleDelayedFrame) { + static const int kWidth = 320; + static const int kHeight = 240; + // This constant is chosen to be higher than the timeout in the video_render + // module. 
This makes sure that frames aren't dropped if there are no other + // frames in the queue. + static const int kDelayRenderCallbackMs = 1000; + + class Renderer : public VideoRenderer { + public: + Renderer() : event_(EventWrapper::Create()) {} + + virtual void RenderFrame(const I420VideoFrame& video_frame, + int /*time_to_render_ms*/) OVERRIDE { + event_->Set(); + } + + EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); } + + scoped_ptr event_; + } renderer; + + class TestFrameCallback : public I420FrameCallback { + public: + TestFrameCallback() : event_(EventWrapper::Create()) {} + + EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); } + + private: + virtual void FrameCallback(I420VideoFrame* frame) OVERRIDE { + SleepMs(kDelayRenderCallbackMs); + event_->Set(); + } + + scoped_ptr event_; + }; + + test::DirectTransport sender_transport, receiver_transport; + + CreateCalls(Call::Config(&sender_transport), + Call::Config(&receiver_transport)); + + sender_transport.SetReceiver(receiver_call_->Receiver()); + receiver_transport.SetReceiver(sender_call_->Receiver()); + + CreateTestConfigs(); + + TestFrameCallback pre_render_callback; + receive_config_.pre_render_callback = &pre_render_callback; + receive_config_.renderer = &renderer; + + CreateStreams(); + StartSending(); + + // Create frames that are smaller than the send width/height, this is done to + // check that the callbacks are done after processing video. + scoped_ptr frame_generator( + test::FrameGenerator::Create(kWidth, kHeight)); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); + EXPECT_EQ(kEventSignaled, pre_render_callback.Wait()) + << "Timed out while waiting for pre-render callback."; + EXPECT_EQ(kEventSignaled, renderer.Wait()) + << "Timed out while waiting for the frame to render."; + + StopSending(); + + sender_transport.StopSending(); + receiver_transport.StopSending(); + + DestroyStreams(); +} + +TEST_F(CallTest, TransmitsFirstFrame) { + class Renderer : public VideoRenderer { + public: + Renderer() : event_(EventWrapper::Create()) {} + + virtual void RenderFrame(const I420VideoFrame& video_frame, + int /*time_to_render_ms*/) OVERRIDE { + event_->Set(); + } + + EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); } + + scoped_ptr event_; + } renderer; + + test::DirectTransport sender_transport, receiver_transport; + + CreateCalls(Call::Config(&sender_transport), + Call::Config(&receiver_transport)); + + sender_transport.SetReceiver(receiver_call_->Receiver()); + receiver_transport.SetReceiver(sender_call_->Receiver()); + + CreateTestConfigs(); + receive_config_.renderer = &renderer; + + CreateStreams(); + StartSending(); + + scoped_ptr frame_generator(test::FrameGenerator::Create( + send_config_.codec.width, send_config_.codec.height)); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); + + EXPECT_EQ(kEventSignaled, renderer.Wait()) + << "Timed out while waiting for the frame to render."; + + StopSending(); + + sender_transport.StopSending(); + receiver_transport.StopSending(); + + DestroyStreams(); +} + +TEST_F(CallTest, ReceiverUsesLocalSsrc) { + class SyncRtcpObserver : public test::RtpRtcpObserver { + public: + SyncRtcpObserver() : test::RtpRtcpObserver(kDefaultTimeoutMs) {} + + virtual Action OnReceiveRtcp(const uint8_t* packet, + size_t length) OVERRIDE { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + uint32_t ssrc = 0; + ssrc |= static_cast(packet[4]) << 24; + ssrc |= static_cast(packet[5]) << 16; 
+ ssrc |= static_cast(packet[6]) << 8; + ssrc |= static_cast(packet[7]) << 0; + EXPECT_EQ(kReceiverLocalSsrc, ssrc); + observation_complete_->Set(); + + return SEND_PACKET; + } + } observer; + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + + CreateTestConfigs(); + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + EXPECT_EQ(kEventSignaled, observer.Wait()) + << "Timed out while waiting for a receiver RTCP packet to be sent."; + + StopSending(); + + observer.StopSending(); + + DestroyStreams(); +} + +TEST_F(CallTest, ReceivesAndRetransmitsNack) { + NackObserver observer; + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + + CreateTestConfigs(); + int rtp_history_ms = 1000; + send_config_.rtp.nack.rtp_history_ms = rtp_history_ms; + receive_config_.rtp.nack.rtp_history_ms = rtp_history_ms; + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + // Wait() waits for an event triggered when NACKs have been received, NACKed + // packets retransmitted and frames rendered again. + EXPECT_EQ(kEventSignaled, observer.Wait()); + + StopSending(); + + observer.StopSending(); + + DestroyStreams(); +} + +// This test drops second RTP packet with a marker bit set, makes sure it's +// retransmitted and renders. Retransmission SSRCs are also checked. +void CallTest::DecodesRetransmittedFrame(bool retransmit_over_rtx) { + static const int kDroppedFrameNumber = 2; + class RetransmissionObserver : public test::RtpRtcpObserver, + public I420FrameCallback { + public: + RetransmissionObserver(bool expect_rtx) + : RtpRtcpObserver(kDefaultTimeoutMs), + retransmission_ssrc_(expect_rtx ? kSendRtxSsrc : kSendSsrc), + retransmission_payload_type_(expect_rtx ? kSendRtxPayloadType + : kSendPayloadType), + marker_bits_observed_(0), + retransmitted_timestamp_(0), + frame_retransmitted_(false) {} + + private: + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + if (header.timestamp == retransmitted_timestamp_) { + EXPECT_EQ(retransmission_ssrc_, header.ssrc); + EXPECT_EQ(retransmission_payload_type_, header.payloadType); + frame_retransmitted_ = true; + return SEND_PACKET; + } + + EXPECT_EQ(kSendSsrc, header.ssrc); + EXPECT_EQ(kSendPayloadType, header.payloadType); + + // Found the second frame's final packet, drop this and expect a + // retransmission. 
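+    // (The RTP marker bit flags the last packet of a frame, so counting
+    // marker bits counts complete frames; kDroppedFrameNumber selects the
+    // second frame.)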
+ if (header.markerBit && ++marker_bits_observed_ == kDroppedFrameNumber) { + retransmitted_timestamp_ = header.timestamp; + return DROP_PACKET; + } + + return SEND_PACKET; + } + + virtual void FrameCallback(I420VideoFrame* frame) OVERRIDE { + CriticalSectionScoped crit_(lock_.get()); + if (frame->timestamp() == retransmitted_timestamp_) { + EXPECT_TRUE(frame_retransmitted_); + observation_complete_->Set(); + } + } + + const uint32_t retransmission_ssrc_; + const int retransmission_payload_type_; + int marker_bits_observed_; + uint32_t retransmitted_timestamp_; + bool frame_retransmitted_; + } observer(retransmit_over_rtx); + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + + CreateTestConfigs(); + send_config_.rtp.nack.rtp_history_ms = + receive_config_.rtp.nack.rtp_history_ms = 1000; + if (retransmit_over_rtx) { + send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrc); + send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; + int payload_type = send_config_.codec.plType; + receive_config_.rtp.rtx[payload_type].ssrc = kSendRtxSsrc; + receive_config_.rtp.rtx[payload_type].payload_type = kSendRtxPayloadType; + } + receive_config_.pre_render_callback = &observer; + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + EXPECT_EQ(kEventSignaled, observer.Wait()) + << "Timed out while waiting for retransmission to render."; + + StopSending(); + observer.StopSending(); + DestroyStreams(); +} + +TEST_F(CallTest, DecodesRetransmittedFrame) { + DecodesRetransmittedFrame(false); +} + +TEST_F(CallTest, DecodesRetransmittedFrameOverRtx) { + DecodesRetransmittedFrame(true); +} + +TEST_F(CallTest, UsesFrameCallbacks) { + static const int kWidth = 320; + static const int kHeight = 240; + + class Renderer : public VideoRenderer { + public: + Renderer() : event_(EventWrapper::Create()) {} + + virtual void RenderFrame(const I420VideoFrame& video_frame, + int /*time_to_render_ms*/) OVERRIDE { + EXPECT_EQ(0, *video_frame.buffer(kYPlane)) + << "Rendered frame should have zero luma which is applied by the " + "pre-render callback."; + event_->Set(); + } + + EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); } + scoped_ptr event_; + } renderer; + + class TestFrameCallback : public I420FrameCallback { + public: + TestFrameCallback(int expected_luma_byte, int next_luma_byte) + : event_(EventWrapper::Create()), + expected_luma_byte_(expected_luma_byte), + next_luma_byte_(next_luma_byte) {} + + EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); } + + private: + virtual void FrameCallback(I420VideoFrame* frame) { + EXPECT_EQ(kWidth, frame->width()) + << "Width not as expected, callback done before resize?"; + EXPECT_EQ(kHeight, frame->height()) + << "Height not as expected, callback done before resize?"; + + // Previous luma specified, observed luma should be fairly close. + if (expected_luma_byte_ != -1) { + EXPECT_NEAR(expected_luma_byte_, *frame->buffer(kYPlane), 10); + } + + memset(frame->buffer(kYPlane), + next_luma_byte_, + frame->allocated_size(kYPlane)); + + event_->Set(); + } + + scoped_ptr event_; + int expected_luma_byte_; + int next_luma_byte_; + }; + + TestFrameCallback pre_encode_callback(-1, 255); // Changes luma to 255. + TestFrameCallback pre_render_callback(255, 0); // Changes luma from 255 to 0. 
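+  // Luma pipeline under test: capturer frame -> pre-encode callback paints
+  // luma to 255 -> encode/decode -> pre-render callback verifies ~255 and
+  // paints luma to 0 -> renderer verifies 0.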
+ + test::DirectTransport sender_transport, receiver_transport; + + CreateCalls(Call::Config(&sender_transport), + Call::Config(&receiver_transport)); + + sender_transport.SetReceiver(receiver_call_->Receiver()); + receiver_transport.SetReceiver(sender_call_->Receiver()); + + CreateTestConfigs(); + send_config_.encoder = NULL; + send_config_.codec = sender_call_->GetVideoCodecs()[0]; + send_config_.codec.width = kWidth; + send_config_.codec.height = kHeight; + send_config_.pre_encode_callback = &pre_encode_callback; + receive_config_.pre_render_callback = &pre_render_callback; + receive_config_.renderer = &renderer; + + CreateStreams(); + StartSending(); + + // Create frames that are smaller than the send width/height, this is done to + // check that the callbacks are done after processing video. + scoped_ptr frame_generator( + test::FrameGenerator::Create(kWidth / 2, kHeight / 2)); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); + + EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait()) + << "Timed out while waiting for pre-encode callback."; + EXPECT_EQ(kEventSignaled, pre_render_callback.Wait()) + << "Timed out while waiting for pre-render callback."; + EXPECT_EQ(kEventSignaled, renderer.Wait()) + << "Timed out while waiting for the frame to render."; + + StopSending(); + + sender_transport.StopSending(); + receiver_transport.StopSending(); + + DestroyStreams(); +} + +class PliObserver : public test::RtpRtcpObserver, public VideoRenderer { + static const int kInverseDropProbability = 16; + + public: + explicit PliObserver(bool nack_enabled) + : test::RtpRtcpObserver(kLongTimeoutMs), + nack_enabled_(nack_enabled), + highest_dropped_timestamp_(0), + frames_to_drop_(0), + received_pli_(false) {} + + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + // Drop all retransmitted packets to force a PLI. 
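+    // (Timestamps at or below the newest dropped frame can only belong to
+    // retransmissions; with those suppressed the decoder cannot recover and
+    // the receiver must eventually send a Picture Loss Indication.)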
+ if (header.timestamp <= highest_dropped_timestamp_) + return DROP_PACKET; + + if (frames_to_drop_ > 0) { + highest_dropped_timestamp_ = header.timestamp; + --frames_to_drop_; + return DROP_PACKET; + } + + return SEND_PACKET; + } + + virtual Action OnReceiveRtcp(const uint8_t* packet, size_t length) OVERRIDE { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + for (RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + packet_type != RTCPUtility::kRtcpNotValidCode; + packet_type = parser.Iterate()) { + if (!nack_enabled_) + EXPECT_NE(packet_type, RTCPUtility::kRtcpRtpfbNackCode); + + if (packet_type == RTCPUtility::kRtcpPsfbPliCode) { + received_pli_ = true; + break; + } + } + return SEND_PACKET; + } + + virtual void RenderFrame(const I420VideoFrame& video_frame, + int time_to_render_ms) OVERRIDE { + CriticalSectionScoped crit_(lock_.get()); + if (received_pli_ && video_frame.timestamp() > highest_dropped_timestamp_) { + observation_complete_->Set(); + } + if (!received_pli_) + frames_to_drop_ = kPacketsToDrop; + } + + private: + static const int kPacketsToDrop = 1; + + bool nack_enabled_; + uint32_t highest_dropped_timestamp_; + int frames_to_drop_; + bool received_pli_; +}; + +void CallTest::ReceivesPliAndRecovers(int rtp_history_ms) { + PliObserver observer(rtp_history_ms > 0); + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + + CreateTestConfigs(); + send_config_.rtp.nack.rtp_history_ms = rtp_history_ms; + receive_config_.rtp.nack.rtp_history_ms = rtp_history_ms; + receive_config_.renderer = &observer; + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + // Wait() waits for an event triggered when Pli has been received and frames + // have been rendered afterwards. + EXPECT_EQ(kEventSignaled, observer.Wait()); + + StopSending(); + + observer.StopSending(); + + DestroyStreams(); +} + +TEST_F(CallTest, ReceivesPliAndRecoversWithNack) { + ReceivesPliAndRecovers(1000); +} + +// TODO(pbos): Enable this when 2250 is resolved. +TEST_F(CallTest, DISABLED_ReceivesPliAndRecoversWithoutNack) { + ReceivesPliAndRecovers(0); +} + +TEST_F(CallTest, SurvivesIncomingRtpPacketsToDestroyedReceiveStream) { + class PacketInputObserver : public PacketReceiver { + public: + explicit PacketInputObserver(PacketReceiver* receiver) + : receiver_(receiver), delivered_packet_(EventWrapper::Create()) {} + + EventTypeWrapper Wait() { + return delivered_packet_->Wait(kDefaultTimeoutMs); + } + + private: + virtual bool DeliverPacket(const uint8_t* packet, size_t length) { + if (RtpHeaderParser::IsRtcp(packet, static_cast(length))) { + return receiver_->DeliverPacket(packet, length); + } else { + EXPECT_FALSE(receiver_->DeliverPacket(packet, length)); + delivered_packet_->Set(); + return false; + } + } + + PacketReceiver* receiver_; + scoped_ptr delivered_packet_; + }; + + test::DirectTransport send_transport, receive_transport; + + CreateCalls(Call::Config(&send_transport), Call::Config(&receive_transport)); + PacketInputObserver input_observer(receiver_call_->Receiver()); + + send_transport.SetReceiver(&input_observer); + receive_transport.SetReceiver(sender_call_->Receiver()); + + CreateTestConfigs(); + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + receiver_call_->DestroyVideoReceiveStream(receive_stream_); + receive_stream_ = NULL; + + // Wait() waits for a received packet. 
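+  // (PacketInputObserver above checks that RTP for the destroyed stream is
+  // rejected by the Call rather than crashing it.)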
+ EXPECT_EQ(kEventSignaled, input_observer.Wait()); + + StopSending(); + + DestroyStreams(); + + send_transport.StopSending(); + receive_transport.StopSending(); +} + +void CallTest::RespectsRtcpMode(newapi::RtcpMode rtcp_mode) { + static const int kRtpHistoryMs = 1000; + static const int kNumCompoundRtcpPacketsToObserve = 10; + class RtcpModeObserver : public test::RtpRtcpObserver { + public: + explicit RtcpModeObserver(newapi::RtcpMode rtcp_mode) + : test::RtpRtcpObserver(kDefaultTimeoutMs), + rtcp_mode_(rtcp_mode), + sent_rtp_(0), + sent_rtcp_(0) {} + + private: + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + if (++sent_rtp_ % 3 == 0) + return DROP_PACKET; + + return SEND_PACKET; + } + + virtual Action OnReceiveRtcp(const uint8_t* packet, + size_t length) OVERRIDE { + ++sent_rtcp_; + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + bool has_report_block = false; + while (packet_type != RTCPUtility::kRtcpNotValidCode) { + EXPECT_NE(RTCPUtility::kRtcpSrCode, packet_type); + if (packet_type == RTCPUtility::kRtcpRrCode) { + has_report_block = true; + break; + } + packet_type = parser.Iterate(); + } + + switch (rtcp_mode_) { + case newapi::kRtcpCompound: + if (!has_report_block) { + ADD_FAILURE() << "Received RTCP packet without receiver report for " + "kRtcpCompound."; + observation_complete_->Set(); + } + + if (sent_rtcp_ >= kNumCompoundRtcpPacketsToObserve) + observation_complete_->Set(); + + break; + case newapi::kRtcpReducedSize: + if (!has_report_block) + observation_complete_->Set(); + break; + } + + return SEND_PACKET; + } + + newapi::RtcpMode rtcp_mode_; + int sent_rtp_; + int sent_rtcp_; + } observer(rtcp_mode); + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + + CreateTestConfigs(); + send_config_.rtp.nack.rtp_history_ms = kRtpHistoryMs; + receive_config_.rtp.nack.rtp_history_ms = kRtpHistoryMs; + receive_config_.rtp.rtcp_mode = rtcp_mode; + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + EXPECT_EQ(kEventSignaled, observer.Wait()) + << (rtcp_mode == newapi::kRtcpCompound + ? "Timed out before observing enough compound packets." + : "Timed out before receiving a non-compound RTCP packet."); + + StopSending(); + observer.StopSending(); + DestroyStreams(); +} + +TEST_F(CallTest, UsesRtcpCompoundMode) { + RespectsRtcpMode(newapi::kRtcpCompound); +} + +TEST_F(CallTest, UsesRtcpReducedSizeMode) { + RespectsRtcpMode(newapi::kRtcpReducedSize); +} + +// Test sets up a Call multiple senders with different resolutions and SSRCs. +// Another is set up to receive all three of these with different renderers. +// Each renderer verifies that it receives the expected resolution, and as soon +// as every renderer has received a frame, the test finishes. 
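+// (Each send stream gets its resolution via send_config.codec.width/height,
+// and each VideoOutputObserver checks the rendered frames against it.)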
+TEST_F(CallTest, SendsAndReceivesMultipleStreams) { + static const size_t kNumStreams = 3; + + class VideoOutputObserver : public VideoRenderer { + public: + VideoOutputObserver(test::FrameGeneratorCapturer** capturer, + int width, + int height) + : capturer_(capturer), + width_(width), + height_(height), + done_(EventWrapper::Create()) {} + + virtual void RenderFrame(const I420VideoFrame& video_frame, + int time_to_render_ms) OVERRIDE { + EXPECT_EQ(width_, video_frame.width()); + EXPECT_EQ(height_, video_frame.height()); + (*capturer_)->Stop(); + done_->Set(); + } + + void Wait() { done_->Wait(kDefaultTimeoutMs); } + + private: + test::FrameGeneratorCapturer** capturer_; + int width_; + int height_; + scoped_ptr done_; + }; + + struct { + uint32_t ssrc; + int width; + int height; + } codec_settings[kNumStreams] = {{1, 640, 480}, {2, 320, 240}, {3, 240, 160}}; + + test::DirectTransport sender_transport, receiver_transport; + scoped_ptr sender_call(Call::Create(Call::Config(&sender_transport))); + scoped_ptr receiver_call( + Call::Create(Call::Config(&receiver_transport))); + sender_transport.SetReceiver(receiver_call->Receiver()); + receiver_transport.SetReceiver(sender_call->Receiver()); + + VideoSendStream* send_streams[kNumStreams]; + VideoReceiveStream* receive_streams[kNumStreams]; + + VideoOutputObserver* observers[kNumStreams]; + test::FrameGeneratorCapturer* frame_generators[kNumStreams]; + + for (size_t i = 0; i < kNumStreams; ++i) { + uint32_t ssrc = codec_settings[i].ssrc; + int width = codec_settings[i].width; + int height = codec_settings[i].height; + observers[i] = new VideoOutputObserver(&frame_generators[i], width, height); + + VideoReceiveStream::Config receive_config = + receiver_call->GetDefaultReceiveConfig(); + receive_config.renderer = observers[i]; + receive_config.rtp.remote_ssrc = ssrc; + receive_config.rtp.local_ssrc = kReceiverLocalSsrc; + receive_streams[i] = + receiver_call->CreateVideoReceiveStream(receive_config); + receive_streams[i]->StartReceiving(); + + VideoSendStream::Config send_config = sender_call->GetDefaultSendConfig(); + send_config.rtp.ssrcs.push_back(ssrc); + send_config.codec.width = width; + send_config.codec.height = height; + send_streams[i] = sender_call->CreateVideoSendStream(send_config); + send_streams[i]->StartSending(); + + frame_generators[i] = test::FrameGeneratorCapturer::Create( + send_streams[i]->Input(), width, height, 30, Clock::GetRealTimeClock()); + frame_generators[i]->Start(); + } + + for (size_t i = 0; i < kNumStreams; ++i) { + observers[i]->Wait(); + } + + for (size_t i = 0; i < kNumStreams; ++i) { + frame_generators[i]->Stop(); + sender_call->DestroyVideoSendStream(send_streams[i]); + receiver_call->DestroyVideoReceiveStream(receive_streams[i]); + delete frame_generators[i]; + delete observers[i]; + } + + sender_transport.StopSending(); + receiver_transport.StopSending(); +}; + +TEST_F(CallTest, ObserversEncodedFrames) { + class EncodedFrameTestObserver : public EncodedFrameObserver { + public: + EncodedFrameTestObserver() + : length_(0), + frame_type_(kFrameEmpty), + called_(EventWrapper::Create()) {} + virtual ~EncodedFrameTestObserver() {} + + virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) { + frame_type_ = encoded_frame.frame_type_; + length_ = encoded_frame.length_; + buffer_.reset(new uint8_t[length_]); + memcpy(buffer_.get(), encoded_frame.data_, length_); + called_->Set(); + } + + EventTypeWrapper Wait() { return called_->Wait(kDefaultTimeoutMs); } + + void ExpectEqualFrames(const 
EncodedFrameTestObserver& observer) { + ASSERT_EQ(length_, observer.length_) + << "Observed frames are of different lengths."; + EXPECT_EQ(frame_type_, observer.frame_type_) + << "Observed frames have different frame types."; + EXPECT_EQ(0, memcmp(buffer_.get(), observer.buffer_.get(), length_)) + << "Observed encoded frames have different content."; + } + + private: + scoped_ptr buffer_; + size_t length_; + FrameType frame_type_; + scoped_ptr called_; + }; + + EncodedFrameTestObserver post_encode_observer; + EncodedFrameTestObserver pre_decode_observer; + + test::DirectTransport sender_transport, receiver_transport; + + CreateCalls(Call::Config(&sender_transport), + Call::Config(&receiver_transport)); + + sender_transport.SetReceiver(receiver_call_->Receiver()); + receiver_transport.SetReceiver(sender_call_->Receiver()); + + CreateTestConfigs(); + send_config_.post_encode_callback = &post_encode_observer; + receive_config_.pre_decode_callback = &pre_decode_observer; + + CreateStreams(); + StartSending(); + + scoped_ptr frame_generator(test::FrameGenerator::Create( + send_config_.codec.width, send_config_.codec.height)); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); + + EXPECT_EQ(kEventSignaled, post_encode_observer.Wait()) + << "Timed out while waiting for send-side encoded-frame callback."; + + EXPECT_EQ(kEventSignaled, pre_decode_observer.Wait()) + << "Timed out while waiting for pre-decode encoded-frame callback."; + + post_encode_observer.ExpectEqualFrames(pre_decode_observer); + + StopSending(); + + sender_transport.StopSending(); + receiver_transport.StopSending(); + + DestroyStreams(); +} + +TEST_F(CallTest, ReceiveStreamSendsRemb) { + class RembObserver : public test::RtpRtcpObserver { + public: + RembObserver() : test::RtpRtcpObserver(kDefaultTimeoutMs) {} + + virtual Action OnReceiveRtcp(const uint8_t* packet, + size_t length) OVERRIDE { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + bool received_psfb = false; + bool received_remb = false; + RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + while (packet_type != RTCPUtility::kRtcpNotValidCode) { + if (packet_type == RTCPUtility::kRtcpPsfbRembCode) { + const RTCPUtility::RTCPPacket& packet = parser.Packet(); + EXPECT_EQ(packet.PSFBAPP.SenderSSRC, kReceiverLocalSsrc); + received_psfb = true; + } else if (packet_type == RTCPUtility::kRtcpPsfbRembItemCode) { + const RTCPUtility::RTCPPacket& packet = parser.Packet(); + EXPECT_GT(packet.REMBItem.BitRate, 0u); + EXPECT_EQ(packet.REMBItem.NumberOfSSRCs, 1u); + EXPECT_EQ(packet.REMBItem.SSRCs[0], kSendSsrc); + received_remb = true; + } + packet_type = parser.Iterate(); + } + if (received_psfb && received_remb) + observation_complete_->Set(); + return SEND_PACKET; + } + } observer; + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + CreateTestConfigs(); + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + EXPECT_EQ(kEventSignaled, observer.Wait()) + << "Timed out while waiting for a receiver RTCP REMB packet to be sent."; + + StopSending(); + observer.StopSending(); + DestroyStreams(); +} + +void CallTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) { + static const int kNumRtcpReportPacketsToObserve = 5; + class RtcpXrObserver : public test::RtpRtcpObserver { + public: + explicit RtcpXrObserver(bool enable_rrtr) + : test::RtpRtcpObserver(kDefaultTimeoutMs), + 
enable_rrtr_(enable_rrtr), + sent_rtcp_sr_(0), + sent_rtcp_rr_(0), + sent_rtcp_rrtr_(0), + sent_rtcp_dlrr_(0) {} + + private: + // Receive stream should send RR packets (and RRTR packets if enabled). + virtual Action OnReceiveRtcp(const uint8_t* packet, + size_t length) OVERRIDE { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + while (packet_type != RTCPUtility::kRtcpNotValidCode) { + if (packet_type == RTCPUtility::kRtcpRrCode) { + ++sent_rtcp_rr_; + } else if (packet_type == + RTCPUtility::kRtcpXrReceiverReferenceTimeCode) { + ++sent_rtcp_rrtr_; + } + EXPECT_NE(packet_type, RTCPUtility::kRtcpSrCode); + EXPECT_NE(packet_type, RTCPUtility::kRtcpXrDlrrReportBlockItemCode); + packet_type = parser.Iterate(); + } + return SEND_PACKET; + } + // Send stream should send SR packets (and DLRR packets if enabled). + virtual Action OnSendRtcp(const uint8_t* packet, size_t length) { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + while (packet_type != RTCPUtility::kRtcpNotValidCode) { + if (packet_type == RTCPUtility::kRtcpSrCode) { + ++sent_rtcp_sr_; + } else if (packet_type == RTCPUtility::kRtcpXrDlrrReportBlockItemCode) { + ++sent_rtcp_dlrr_; + } + EXPECT_NE(packet_type, RTCPUtility::kRtcpXrReceiverReferenceTimeCode); + packet_type = parser.Iterate(); + } + if (sent_rtcp_sr_ > kNumRtcpReportPacketsToObserve && + sent_rtcp_rr_ > kNumRtcpReportPacketsToObserve) { + if (enable_rrtr_) { + EXPECT_GT(sent_rtcp_rrtr_, 0); + EXPECT_GT(sent_rtcp_dlrr_, 0); + } else { + EXPECT_EQ(0, sent_rtcp_rrtr_); + EXPECT_EQ(0, sent_rtcp_dlrr_); + } + observation_complete_->Set(); + } + return SEND_PACKET; + } + bool enable_rrtr_; + int sent_rtcp_sr_; + int sent_rtcp_rr_; + int sent_rtcp_rrtr_; + int sent_rtcp_dlrr_; + } observer(enable_rrtr); + + CreateCalls(Call::Config(observer.SendTransport()), + Call::Config(observer.ReceiveTransport())); + observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver()); + + CreateTestConfigs(); + receive_config_.rtp.rtcp_mode = newapi::kRtcpReducedSize; + receive_config_.rtp.rtcp_xr.receiver_reference_time_report = enable_rrtr; + + CreateStreams(); + CreateFrameGenerator(); + StartSending(); + + EXPECT_EQ(kEventSignaled, observer.Wait()) + << "Timed out while waiting for RTCP SR/RR packets to be sent."; + + StopSending(); + observer.StopSending(); + DestroyStreams(); +} + +class StatsObserver : public test::RtpRtcpObserver, public I420FrameCallback { + public: + StatsObserver() + : test::RtpRtcpObserver(kLongTimeoutMs), + receive_stream_(NULL), + send_stream_(NULL), + expected_receive_ssrc_(), + expected_send_ssrcs_(), + check_stats_event_(EventWrapper::Create()) {} + + void SetExpectedReceiveSsrc(uint32_t ssrc) { expected_receive_ssrc_ = ssrc; } + + void SetExpectedSendSsrcs(const std::vector& ssrcs) { + for (std::vector::const_iterator it = ssrcs.begin(); + it != ssrcs.end(); + ++it) { + expected_send_ssrcs_.insert(*it); + } + } + + void SetExpectedCName(std::string cname) { expected_cname_ = cname; } + + void SetReceiveStream(VideoReceiveStream* stream) { + receive_stream_ = stream; + } + + void SetSendStream(VideoSendStream* stream) { send_stream_ = stream; } + + void WaitForFilledStats() { + Clock* clock = Clock::GetRealTimeClock(); + int64_t now = clock->TimeInMilliseconds(); + int64_t stop_time = now + kLongTimeoutMs; + bool receive_ok = false; + bool 
send_ok = false;
+
+    while (now < stop_time) {
+      if (!receive_ok)
+        receive_ok = CheckReceiveStats();
+      if (!send_ok)
+        send_ok = CheckSendStats();
+
+      if (receive_ok && send_ok)
+        return;
+
+      int64_t time_until_timeout = stop_time - now;
+      if (time_until_timeout > 0)
+        check_stats_event_->Wait(time_until_timeout);
+      now = clock->TimeInMilliseconds();
+    }
+
+    ADD_FAILURE() << "Timed out waiting for filled stats.";
+    for (std::map<std::string, bool>::const_iterator it =
+             receive_stats_filled_.begin();
+         it != receive_stats_filled_.end();
+         ++it) {
+      if (!it->second) {
+        ADD_FAILURE() << "Missing receive stats: " << it->first;
+      }
+    }
+
+    for (std::map<std::string, bool>::const_iterator it =
+             send_stats_filled_.begin();
+         it != send_stats_filled_.end();
+         ++it) {
+      if (!it->second) {
+        ADD_FAILURE() << "Missing send stats: " << it->first;
+      }
+    }
+  }
+
+ private:
+  virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+    check_stats_event_->Set();
+    return SEND_PACKET;
+  }
+
+  virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+    check_stats_event_->Set();
+    return SEND_PACKET;
+  }
+
+  virtual Action OnReceiveRtp(const uint8_t* packet, size_t length) OVERRIDE {
+    check_stats_event_->Set();
+    return SEND_PACKET;
+  }
+
+  virtual Action OnReceiveRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+    check_stats_event_->Set();
+    return SEND_PACKET;
+  }
+
+  virtual void FrameCallback(I420VideoFrame* video_frame) OVERRIDE {
+    // Ensure that we have at least 5ms send side delay.
+    int64_t render_time = video_frame->render_time_ms();
+    if (render_time > 0)
+      video_frame->set_render_time_ms(render_time - 5);
+  }
+
+  bool CheckReceiveStats() {
+    assert(receive_stream_ != NULL);
+    VideoReceiveStream::Stats stats = receive_stream_->GetStats();
+    EXPECT_EQ(expected_receive_ssrc_, stats.ssrc);
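// The bookkeeping below is a latch: each named check is OR-ed into a map, so
// a stat counts as "filled" once its condition has held at least one time,
// and polling stops when every entry has latched. The same idiom in isolation
// (standard library only; LatchedChecks is an illustrative stand-in for the
// *_stats_filled_ maps):

#include <map>
#include <string>

class LatchedChecks {
 public:
  void Update(const std::string& name, bool condition) {
    filled_[name] |= condition;  // operator[] default-inserts false
  }
  bool AllFilled() const {
    for (std::map<std::string, bool>::const_iterator it = filled_.begin();
         it != filled_.end(); ++it) {
      if (!it->second)
        return false;
    }
    return true;
  }

 private:
  std::map<std::string, bool> filled_;
};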
+
+    // Make sure all fields have been populated.
+
+    receive_stats_filled_["IncomingRate"] |=
+        stats.network_frame_rate != 0 || stats.bitrate_bps != 0;
+
+    receive_stats_filled_["FrameCallback"] |= stats.decode_frame_rate != 0;
+
+    receive_stats_filled_["FrameRendered"] |= stats.render_frame_rate != 0;
+
+    receive_stats_filled_["StatisticsUpdated"] |=
+        stats.rtcp_stats.cumulative_lost != 0 ||
+        stats.rtcp_stats.extended_max_sequence_number != 0 ||
+        stats.rtcp_stats.fraction_lost != 0 || stats.rtcp_stats.jitter != 0;
+
+    receive_stats_filled_["DataCountersUpdated"] |=
+        stats.rtp_stats.bytes != 0 || stats.rtp_stats.fec_packets != 0 ||
+        stats.rtp_stats.header_bytes != 0 || stats.rtp_stats.packets != 0 ||
+        stats.rtp_stats.padding_bytes != 0 ||
+        stats.rtp_stats.retransmitted_packets != 0;
+
+    receive_stats_filled_["CodecStats"] |=
+        stats.avg_delay_ms != 0 || stats.discarded_packets != 0 ||
+        stats.key_frames != 0 || stats.delta_frames != 0;
+
+    receive_stats_filled_["CName"] |= stats.c_name == expected_cname_;
+
+    return AllStatsFilled(receive_stats_filled_);
+  }
+
+  bool CheckSendStats() {
+    assert(send_stream_ != NULL);
+    VideoSendStream::Stats stats = send_stream_->GetStats();
+
+    send_stats_filled_["NumStreams"] |=
+        stats.substreams.size() == expected_send_ssrcs_.size();
+
+    send_stats_filled_["Delay"] |=
+        stats.avg_delay_ms != 0 || stats.max_delay_ms != 0;
+
+    send_stats_filled_["CName"] |= stats.c_name == expected_cname_;
+
+    for (std::map<uint32_t, StreamStats>::const_iterator it =
+             stats.substreams.begin();
+         it != stats.substreams.end();
+         ++it) {
+      EXPECT_TRUE(expected_send_ssrcs_.find(it->first) !=
+                  expected_send_ssrcs_.end());
+
+      send_stats_filled_[CompoundKey("IncomingRate", it->first)] |=
+          stats.input_frame_rate != 0;
+
+      const StreamStats& stream_stats = it->second;
+
+      send_stats_filled_[CompoundKey("StatisticsUpdated", it->first)] |=
+          stream_stats.rtcp_stats.cumulative_lost != 0 ||
+          stream_stats.rtcp_stats.extended_max_sequence_number != 0 ||
+          stream_stats.rtcp_stats.fraction_lost != 0;
+
+      send_stats_filled_[CompoundKey("DataCountersUpdated", it->first)] |=
+          stream_stats.rtp_stats.fec_packets != 0 ||
+          stream_stats.rtp_stats.padding_bytes != 0 ||
+          stream_stats.rtp_stats.retransmitted_packets != 0 ||
+          stream_stats.rtp_stats.packets != 0;
+
+      send_stats_filled_[CompoundKey("BitrateStatisticsObserver",
+                                     it->first)] |=
+          stream_stats.bitrate_bps != 0;
+
+      send_stats_filled_[CompoundKey("FrameCountObserver", it->first)] |=
+          stream_stats.delta_frames != 0 || stream_stats.key_frames != 0;
+
+      send_stats_filled_[CompoundKey("OutgoingRate", it->first)] |=
+          stats.encode_frame_rate != 0;
+    }
+
+    return AllStatsFilled(send_stats_filled_);
+  }
+
+  std::string CompoundKey(const char* name, uint32_t ssrc) {
+    std::ostringstream oss;
+    oss << name << "_" << ssrc;
+    return oss.str();
+  }
+
+  bool AllStatsFilled(const std::map<std::string, bool>& stats_map) {
+    for (std::map<std::string, bool>::const_iterator it = stats_map.begin();
+         it != stats_map.end();
+         ++it) {
+      if (!it->second)
+        return false;
+    }
+    return true;
+  }
+
+  VideoReceiveStream* receive_stream_;
+  std::map<std::string, bool> receive_stats_filled_;
+
+  VideoSendStream* send_stream_;
+  std::map<std::string, bool> send_stats_filled_;
+
+  uint32_t expected_receive_ssrc_;
+  std::set<uint32_t> expected_send_ssrcs_;
+  std::string expected_cname_;
+
+  scoped_ptr<EventWrapper> check_stats_event_;
+};
+
+TEST_F(CallTest, GetStats) {
+  StatsObserver observer;
+
+  CreateCalls(Call::Config(observer.SendTransport()),
+              Call::Config(observer.ReceiveTransport()));
+
+  observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver());
+
+  CreateTestConfigs();
send_config_.pre_encode_callback = &observer; // Used to inject delay. + send_config_.rtp.c_name = "SomeCName"; + + observer.SetExpectedReceiveSsrc(receive_config_.rtp.local_ssrc); + observer.SetExpectedSendSsrcs(send_config_.rtp.ssrcs); + observer.SetExpectedCName(send_config_.rtp.c_name); + + CreateStreams(); + observer.SetReceiveStream(receive_stream_); + observer.SetSendStream(send_stream_); + CreateFrameGenerator(); + StartSending(); + + observer.WaitForFilledStats(); + + StopSending(); + observer.StopSending(); + DestroyStreams(); +} + +TEST_F(CallTest, ReceiverReferenceTimeReportEnabled) { + TestXrReceiverReferenceTimeReport(true); +} + +TEST_F(CallTest, ReceiverReferenceTimeReportDisabled) { + TestXrReceiverReferenceTimeReport(false); +} +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/encoded_frame_callback_adapter.cc b/media/webrtc/trunk/webrtc/video/encoded_frame_callback_adapter.cc new file mode 100644 index 000000000000..f5eca7ce449a --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/encoded_frame_callback_adapter.cc @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/video/encoded_frame_callback_adapter.h" + +#include "webrtc/modules/video_coding/main/source/encoded_frame.h" + +namespace webrtc { +namespace internal { + +EncodedFrameCallbackAdapter::EncodedFrameCallbackAdapter( + EncodedFrameObserver* observer) : observer_(observer) { +} + +EncodedFrameCallbackAdapter::~EncodedFrameCallbackAdapter() {} + +int32_t EncodedFrameCallbackAdapter::Encoded( + EncodedImage& encodedImage, + const CodecSpecificInfo* codecSpecificInfo, + const RTPFragmentationHeader* fragmentation) { + assert(observer_ != NULL); + FrameType frame_type = + VCMEncodedFrame::ConvertFrameType(encodedImage._frameType); + const EncodedFrame frame(encodedImage._buffer, + encodedImage._length, + frame_type); + observer_->EncodedFrameCallback(frame); + return 0; +} + +} // namespace internal +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/encoded_frame_callback_adapter.h b/media/webrtc/trunk/webrtc/video/encoded_frame_callback_adapter.h new file mode 100644 index 000000000000..d3814797ad44 --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/encoded_frame_callback_adapter.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_ +#define WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_ + +#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" +#include "webrtc/frame_callback.h" + +namespace webrtc { +namespace internal { + +class EncodedFrameCallbackAdapter : public EncodedImageCallback { + public: + explicit EncodedFrameCallbackAdapter(EncodedFrameObserver* observer); + virtual ~EncodedFrameCallbackAdapter(); + + virtual int32_t Encoded(EncodedImage& encodedImage, + const CodecSpecificInfo* codecSpecificInfo, + const RTPFragmentationHeader* fragmentation); + + private: + EncodedFrameObserver* observer_; +}; + +} // namespace internal +} // namespace webrtc + +#endif // WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_ diff --git a/media/webrtc/trunk/webrtc/video_engine/test/full_stack.cc b/media/webrtc/trunk/webrtc/video/full_stack.cc similarity index 54% rename from media/webrtc/trunk/webrtc/video_engine/test/full_stack.cc rename to media/webrtc/trunk/webrtc/video/full_stack.cc index 5f1240fd7cc0..1181bfe488ec 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/full_stack.cc +++ b/media/webrtc/trunk/webrtc/video/full_stack.cc @@ -15,25 +15,27 @@ #include "gflags/gflags.h" #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/call.h" #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" #include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" #include "webrtc/system_wrappers/interface/clock.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/system_wrappers/interface/sleep.h" #include "webrtc/test/testsupport/fileutils.h" +#include "webrtc/test/direct_transport.h" +#include "webrtc/test/frame_generator_capturer.h" +#include "webrtc/test/statistics.h" +#include "webrtc/test/video_renderer.h" #include "webrtc/typedefs.h" -#include "webrtc/video_engine/new_include/call.h" -#include "webrtc/video_engine/test/common/direct_transport.h" -#include "webrtc/video_engine/test/common/frame_generator_capturer.h" -#include "webrtc/video_engine/test/common/generate_ssrcs.h" -#include "webrtc/video_engine/test/common/statistics.h" -#include "webrtc/video_engine/test/common/video_renderer.h" DEFINE_int32(seconds, 10, "Seconds to run each clip."); namespace webrtc { +static const uint32_t kSendSsrc = 0x654321; + struct FullStackTestParams { const char* test_label; struct { @@ -69,16 +71,15 @@ class VideoAnalyzer : public PacketReceiver, public: VideoAnalyzer(VideoSendStreamInput* input, Transport* transport, - VideoRenderer* loopback_video, const char* test_label, double avg_psnr_threshold, double avg_ssim_threshold, - uint64_t duration_frames) + int duration_frames) : input_(input), transport_(transport), - renderer_(loopback_video), receiver_(NULL), test_label_(test_label), + dropped_frames_(0), rtp_timestamp_delta_(0), first_send_frame_(NULL), last_render_time_(0), @@ -86,9 +87,17 @@ class VideoAnalyzer : public PacketReceiver, avg_ssim_threshold_(avg_ssim_threshold), frames_left_(duration_frames), crit_(CriticalSectionWrapper::CreateCriticalSection()), - trigger_(EventWrapper::Create()) {} + comparison_lock_(CriticalSectionWrapper::CreateCriticalSection()), + comparison_thread_(ThreadWrapper::CreateThread(&FrameComparisonThread, + this)), + trigger_(EventWrapper::Create()) { + unsigned int id; + EXPECT_TRUE(comparison_thread_->Start(id)); + } ~VideoAnalyzer() 
{ + EXPECT_TRUE(comparison_thread_->Stop()); + while (!frames_.empty()) { delete frames_.back(); frames_.pop_back(); @@ -99,6 +108,8 @@ class VideoAnalyzer : public PacketReceiver, } } + virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; } + virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE { scoped_ptr parser(RtpHeaderParser::Create()); RTPHeader header; @@ -112,8 +123,11 @@ class VideoAnalyzer : public PacketReceiver, return receiver_->DeliverPacket(packet, length); } - virtual void PutFrame(const I420VideoFrame& video_frame, - uint32_t delta_capture_ms) OVERRIDE { + virtual void PutFrame(const I420VideoFrame& video_frame) OVERRIDE { + ADD_FAILURE() << "PutFrame() should not have been called in this test."; + } + + virtual void SwapFrame(I420VideoFrame* video_frame) OVERRIDE { I420VideoFrame* copy = NULL; { CriticalSectionScoped cs(crit_.get()); @@ -125,7 +139,7 @@ class VideoAnalyzer : public PacketReceiver, if (copy == NULL) copy = new I420VideoFrame(); - copy->CopyFrame(video_frame); + copy->CopyFrame(*video_frame); copy->set_timestamp(copy->render_time_ms() * 90); { @@ -136,10 +150,10 @@ class VideoAnalyzer : public PacketReceiver, frames_.push_back(copy); } - input_->PutFrame(video_frame, delta_capture_ms); + input_->SwapFrame(video_frame); } - virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE { + virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE { scoped_ptr parser(RtpHeaderParser::Create()); RTPHeader header; parser->Parse(packet, static_cast(length), &header); @@ -155,21 +169,24 @@ class VideoAnalyzer : public PacketReceiver, Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); } - return transport_->SendRTP(packet, length); + return transport_->SendRtp(packet, length); } - virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE { - return transport_->SendRTCP(packet, length); + virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE { + return transport_->SendRtcp(packet, length); } virtual void RenderFrame(const I420VideoFrame& video_frame, int time_to_render_ms) OVERRIDE { + int64_t render_time_ms = + Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_; { CriticalSectionScoped cs(crit_.get()); while (frames_.front()->timestamp() < send_timestamp) { - AddFrameComparison(frames_.front(), &last_rendered_frame_, true); + AddFrameComparison( + frames_.front(), &last_rendered_frame_, true, render_time_ms); frame_pool_.push_back(frames_.front()); frames_.pop_front(); } @@ -177,56 +194,150 @@ class VideoAnalyzer : public PacketReceiver, I420VideoFrame* reference_frame = frames_.front(); frames_.pop_front(); assert(reference_frame != NULL); + EXPECT_EQ(reference_frame->timestamp(), send_timestamp); assert(reference_frame->timestamp() == send_timestamp); - AddFrameComparison(reference_frame, &video_frame, false); + AddFrameComparison(reference_frame, &video_frame, false, render_time_ms); frame_pool_.push_back(reference_frame); + } + + last_rendered_frame_.CopyFrame(video_frame); + } + + void Wait() { trigger_->Wait(120 * 1000); } + + VideoSendStreamInput* input_; + Transport* transport_; + PacketReceiver* receiver_; + + private: + struct FrameComparison { + FrameComparison(const I420VideoFrame* reference, + const I420VideoFrame* render, + bool dropped, + int64_t send_time_ms, + int64_t recv_time_ms, + int64_t render_time_ms) + : dropped(dropped), + send_time_ms(send_time_ms), + 
recv_time_ms(recv_time_ms), + render_time_ms(render_time_ms) { + this->reference.CopyFrame(*reference); + this->render.CopyFrame(*render); + } + + FrameComparison(const FrameComparison& compare) + : dropped(compare.dropped), + send_time_ms(compare.send_time_ms), + recv_time_ms(compare.recv_time_ms), + render_time_ms(compare.render_time_ms) { + this->reference.CopyFrame(compare.reference); + this->render.CopyFrame(compare.render); + } + + ~FrameComparison() {} + + I420VideoFrame reference; + I420VideoFrame render; + bool dropped; + int64_t send_time_ms; + int64_t recv_time_ms; + int64_t render_time_ms; + }; + + void AddFrameComparison(const I420VideoFrame* reference, + const I420VideoFrame* render, + bool dropped, + int64_t render_time_ms) { + int64_t send_time_ms = send_times_[reference->timestamp()]; + send_times_.erase(reference->timestamp()); + int64_t recv_time_ms = recv_times_[reference->timestamp()]; + recv_times_.erase(reference->timestamp()); + + CriticalSectionScoped crit(comparison_lock_.get()); + comparisons_.push_back(FrameComparison(reference, + render, + dropped, + send_time_ms, + recv_time_ms, + render_time_ms)); + } + + static bool FrameComparisonThread(void* obj) { + return static_cast(obj)->CompareFrames(); + } + + bool CompareFrames() { + assert(frames_left_ > 0); + + I420VideoFrame reference; + I420VideoFrame render; + bool dropped; + int64_t send_time_ms; + int64_t recv_time_ms; + int64_t render_time_ms; + + SleepMs(10); + + while (true) { + { + CriticalSectionScoped crit(comparison_lock_.get()); + if (comparisons_.empty()) + return true; + reference.SwapFrame(&comparisons_.front().reference); + render.SwapFrame(&comparisons_.front().render); + dropped = comparisons_.front().dropped; + send_time_ms = comparisons_.front().send_time_ms; + recv_time_ms = comparisons_.front().recv_time_ms; + render_time_ms = comparisons_.front().render_time_ms; + comparisons_.pop_front(); + } + + PerformFrameComparison(&reference, + &render, + dropped, + send_time_ms, + recv_time_ms, + render_time_ms); if (--frames_left_ == 0) { PrintResult("psnr", psnr_, " dB"); PrintResult("ssim", ssim_, ""); PrintResult("sender_time", sender_time_, " ms"); + printf( + "RESULT dropped_frames: %s = %d\n", test_label_, dropped_frames_); PrintResult("receiver_time", receiver_time_, " ms"); PrintResult("total_delay_incl_network", end_to_end_, " ms"); PrintResult("time_between_rendered_frames", rendered_delta_, " ms"); EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_); EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_); trigger_->Set(); + + return false; } } - - renderer_->RenderFrame(video_frame, time_to_render_ms); - last_rendered_frame_.CopyFrame(video_frame); } - void Wait() { trigger_->Wait(WEBRTC_EVENT_INFINITE); } - - VideoSendStreamInput* input_; - Transport* transport_; - VideoRenderer* renderer_; - PacketReceiver* receiver_; - - private: - void AddFrameComparison(const I420VideoFrame* reference_frame, - const I420VideoFrame* render, - bool dropped) { - int64_t render_time = Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); - psnr_.AddSample(I420PSNR(reference_frame, render)); - ssim_.AddSample(I420SSIM(reference_frame, render)); - if (dropped) + void PerformFrameComparison(const I420VideoFrame* reference, + const I420VideoFrame* render, + bool dropped, + int64_t send_time_ms, + int64_t recv_time_ms, + int64_t render_time_ms) { + psnr_.AddSample(I420PSNR(reference, render)); + ssim_.AddSample(I420SSIM(reference, render)); + if (dropped) { + ++dropped_frames_; return; + } if (last_render_time_ != 0) - 
rendered_delta_.AddSample(render_time - last_render_time_); - last_render_time_ = render_time; + rendered_delta_.AddSample(render_time_ms - last_render_time_); + last_render_time_ = render_time_ms; - int64_t input_time = reference_frame->render_time_ms(); - int64_t send_time = send_times_[reference_frame->timestamp()]; - send_times_.erase(reference_frame->timestamp()); - sender_time_.AddSample(send_time - input_time); - int64_t recv_time = recv_times_[reference_frame->timestamp()]; - recv_times_.erase(reference_frame->timestamp()); - receiver_time_.AddSample(render_time - recv_time); - end_to_end_.AddSample(render_time - input_time); + int64_t input_time_ms = reference->render_time_ms(); + sender_time_.AddSample(send_time_ms - input_time_ms); + receiver_time_.AddSample(render_time_ms - recv_time_ms); + end_to_end_.AddSample(render_time_ms - input_time_ms); } void PrintResult(const char* result_type, @@ -248,6 +359,7 @@ class VideoAnalyzer : public PacketReceiver, test::Statistics end_to_end_; test::Statistics rendered_delta_; + int dropped_frames_; std::deque frames_; std::deque frame_pool_; I420VideoFrame last_rendered_frame_; @@ -258,39 +370,34 @@ class VideoAnalyzer : public PacketReceiver, int64_t last_render_time_; double avg_psnr_threshold_; double avg_ssim_threshold_; - uint32_t frames_left_; + int frames_left_; scoped_ptr crit_; + scoped_ptr comparison_lock_; + scoped_ptr comparison_thread_; + std::deque comparisons_; scoped_ptr trigger_; }; -TEST_P(FullStackTest, DISABLED_NoPacketLoss) { +TEST_P(FullStackTest, NoPacketLoss) { + static const uint32_t kReceiverLocalSsrc = 0x123456; FullStackTestParams params = GetParam(); - scoped_ptr local_preview(test::VideoRenderer::Create( - "Local Preview", params.clip.width, params.clip.height)); - scoped_ptr loopback_video(test::VideoRenderer::Create( - "Loopback Video", params.clip.width, params.clip.height)); - test::DirectTransport transport; - VideoAnalyzer analyzer( - NULL, - &transport, - loopback_video.get(), - params.test_label, - params.avg_psnr_threshold, - params.avg_ssim_threshold, - static_cast(FLAGS_seconds * params.clip.fps)); + VideoAnalyzer analyzer(NULL, + &transport, + params.test_label, + params.avg_psnr_threshold, + params.avg_ssim_threshold, + FLAGS_seconds * params.clip.fps); Call::Config call_config(&analyzer); scoped_ptr call(Call::Create(call_config)); - analyzer.receiver_ = call->Receiver(); + analyzer.SetReceiver(call->Receiver()); transport.SetReceiver(&analyzer); VideoSendStream::Config send_config = call->GetDefaultSendConfig(); - test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_); - - send_config.local_renderer = local_preview.get(); + send_config.rtp.ssrcs.push_back(kSendSsrc); // TODO(pbos): static_cast shouldn't be required after mflodman refactors the // VideoCodec struct. @@ -300,7 +407,7 @@ TEST_P(FullStackTest, DISABLED_NoPacketLoss) { send_config.codec.startBitrate = params.bitrate; send_config.codec.maxBitrate = params.bitrate; - VideoSendStream* send_stream = call->CreateSendStream(send_config); + VideoSendStream* send_stream = call->CreateVideoSendStream(send_config); analyzer.input_ = send_stream->Input(); scoped_ptr file_capturer( @@ -311,27 +418,31 @@ TEST_P(FullStackTest, DISABLED_NoPacketLoss) { params.clip.height, params.clip.fps, Clock::GetRealTimeClock())); + ASSERT_TRUE(file_capturer.get() != NULL) + << "Could not create capturer for " << params.clip.name + << ".yuv. 
Is this resource file present?"; VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig(); - receive_config.rtp.ssrc = send_config.rtp.ssrcs[0]; + receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0]; + receive_config.rtp.local_ssrc = kReceiverLocalSsrc; receive_config.renderer = &analyzer; VideoReceiveStream* receive_stream = - call->CreateReceiveStream(receive_config); + call->CreateVideoReceiveStream(receive_config); - receive_stream->StartReceive(); - send_stream->StartSend(); + receive_stream->StartReceiving(); + send_stream->StartSending(); file_capturer->Start(); analyzer.Wait(); file_capturer->Stop(); - send_stream->StopSend(); - receive_stream->StopReceive(); + send_stream->StopSending(); + receive_stream->StopReceiving(); - call->DestroyReceiveStream(receive_stream); - call->DestroySendStream(send_stream); + call->DestroyVideoReceiveStream(receive_stream); + call->DestroyVideoSendStream(send_stream); transport.StopSending(); } diff --git a/media/webrtc/trunk/webrtc/video_engine/test/loopback.cc b/media/webrtc/trunk/webrtc/video/loopback.cc similarity index 73% rename from media/webrtc/trunk/webrtc/video_engine/test/loopback.cc rename to media/webrtc/trunk/webrtc/video/loopback.cc index 93f2d839b367..48de326efa04 100644 --- a/media/webrtc/trunk/webrtc/video_engine/test/loopback.cc +++ b/media/webrtc/trunk/webrtc/video/loopback.cc @@ -14,17 +14,16 @@ #include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/call.h" #include "webrtc/system_wrappers/interface/clock.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/direct_transport.h" +#include "webrtc/test/flags.h" +#include "webrtc/test/run_loop.h" +#include "webrtc/test/run_tests.h" +#include "webrtc/test/video_capturer.h" +#include "webrtc/test/video_renderer.h" #include "webrtc/typedefs.h" -#include "webrtc/video_engine/new_include/call.h" -#include "webrtc/video_engine/test/common/direct_transport.h" -#include "webrtc/video_engine/test/common/flags.h" -#include "webrtc/video_engine/test/common/generate_ssrcs.h" -#include "webrtc/video_engine/test/common/run_loop.h" -#include "webrtc/video_engine/test/common/run_tests.h" -#include "webrtc/video_engine/test/common/video_capturer.h" -#include "webrtc/video_engine/test/common/video_renderer.h" namespace webrtc { @@ -33,6 +32,9 @@ class LoopbackTest : public ::testing::Test { std::map reserved_ssrcs_; }; +static const uint32_t kSendSsrc = 0x654321; +static const uint32_t kReceiverLocalSsrc = 0x123456; + TEST_F(LoopbackTest, Test) { scoped_ptr local_preview(test::VideoRenderer::Create( "Local Preview", test::flags::Width(), test::flags::Height())); @@ -41,14 +43,13 @@ TEST_F(LoopbackTest, Test) { test::DirectTransport transport; Call::Config call_config(&transport); - call_config.overuse_detection = true; scoped_ptr call(Call::Create(call_config)); // Loopback, call sends to itself. 
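// In the loopback topology the Call's own packet receiver is handed to the
// transport, so everything the send stream emits is delivered straight back
// into the receive stream of the same Call. A minimal sketch of that wiring,
// with stand-in types (only the SetReceiver(call->Receiver()) shape comes
// from the code below):

#include <stddef.h>
#include <stdint.h>

struct SketchPacketSink {  // stand-in for the Call's packet receiver
  virtual bool DeliverPacket(const uint8_t* packet, size_t length) = 0;
  virtual ~SketchPacketSink() {}
};

struct LoopTransport {  // stand-in for test::DirectTransport
  LoopTransport() : receiver_(NULL) {}
  void SetReceiver(SketchPacketSink* r) { receiver_ = r; }
  // "Sending" just hands the packet back to the registered receiver, which
  // in the loopback test is the same Call that produced it.
  bool SendPacket(const uint8_t* packet, size_t length) {
    return receiver_ != NULL && receiver_->DeliverPacket(packet, length);
  }
  SketchPacketSink* receiver_;
};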
transport.SetReceiver(call->Receiver()); VideoSendStream::Config send_config = call->GetDefaultSendConfig(); - test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_); + send_config.rtp.ssrcs.push_back(kSendSsrc); send_config.local_renderer = local_preview.get(); @@ -63,7 +64,7 @@ TEST_F(LoopbackTest, Test) { send_config.codec.maxBitrate = static_cast(test::flags::MaxBitrate()); - VideoSendStream* send_stream = call->CreateSendStream(send_config); + VideoSendStream* send_stream = call->CreateVideoSendStream(send_config); Clock* test_clock = Clock::GetRealTimeClock(); @@ -75,25 +76,26 @@ TEST_F(LoopbackTest, Test) { test_clock)); VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig(); - receive_config.rtp.ssrc = send_config.rtp.ssrcs[0]; + receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0]; + receive_config.rtp.local_ssrc = kReceiverLocalSsrc; receive_config.renderer = loopback_video.get(); VideoReceiveStream* receive_stream = - call->CreateReceiveStream(receive_config); + call->CreateVideoReceiveStream(receive_config); - receive_stream->StartReceive(); - send_stream->StartSend(); + receive_stream->StartReceiving(); + send_stream->StartSending(); camera->Start(); test::PressEnterToContinue(); camera->Stop(); - send_stream->StopSend(); - receive_stream->StopReceive(); + send_stream->StopSending(); + receive_stream->StopReceiving(); - call->DestroyReceiveStream(receive_stream); - call->DestroySendStream(send_stream); + call->DestroyVideoReceiveStream(receive_stream); + call->DestroyVideoSendStream(send_stream); transport.StopSending(); } -} // webrtc +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/rampup_tests.cc b/media/webrtc/trunk/webrtc/video/rampup_tests.cc new file mode 100644 index 000000000000..266172d4de7a --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/rampup_tests.cc @@ -0,0 +1,277 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include + +#include +#include + +#include "testing/gtest/include/gtest/gtest.h" + +#include "webrtc/call.h" +#include "webrtc/common.h" +#include "webrtc/experiments.h" +#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h" +#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h" +#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h" +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" +#include "webrtc/system_wrappers/interface/event_wrapper.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/test/direct_transport.h" +#include "webrtc/test/fake_decoder.h" +#include "webrtc/test/fake_encoder.h" +#include "webrtc/test/frame_generator_capturer.h" +#include "webrtc/test/testsupport/perf_test.h" +#include "webrtc/video/transport_adapter.h" + +namespace webrtc { + +namespace { +static const int kAbsoluteSendTimeExtensionId = 7; +static const int kMaxPacketSize = 1500; +} + +class StreamObserver : public newapi::Transport, public RemoteBitrateObserver { + public: + typedef std::map BytesSentMap; + typedef std::map SsrcMap; + StreamObserver(int num_expected_ssrcs, + const SsrcMap& rtx_media_ssrcs, + newapi::Transport* feedback_transport, + Clock* clock) + : critical_section_(CriticalSectionWrapper::CreateCriticalSection()), + all_ssrcs_sent_(EventWrapper::Create()), + rtp_parser_(RtpHeaderParser::Create()), + feedback_transport_(feedback_transport), + receive_stats_(ReceiveStatistics::Create(clock)), + payload_registry_( + new RTPPayloadRegistry(-1, + RTPPayloadStrategy::CreateStrategy(false))), + clock_(clock), + num_expected_ssrcs_(num_expected_ssrcs), + rtx_media_ssrcs_(rtx_media_ssrcs), + total_sent_(0), + padding_sent_(0), + rtx_media_sent_(0), + total_packets_sent_(0), + padding_packets_sent_(0), + rtx_media_packets_sent_(0) { + // Ideally we would only have to instantiate an RtcpSender, an + // RtpHeaderParser and a RemoteBitrateEstimator here, but due to the current + // state of the RTP module we need a full module and receive statistics to + // be able to produce an RTCP with REMB. 
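// The flow this constructor assembles: incoming RTP headers feed a remote
// bitrate estimator, and whenever the estimate changes, a REMB message with
// the new bitrate and the observed SSRCs is echoed back to the sender. In
// outline (stand-in types; the callback shape loosely mirrors
// RemoteBitrateObserver::OnReceiveBitrateChanged used below):

#include <vector>

struct RembSink {  // stand-in for the RtpRtcp module configured below
  virtual void SendRemb(unsigned int bitrate_bps,
                        const std::vector<unsigned int>& ssrcs) = 0;
  virtual ~RembSink() {}
};

// Ramp-up succeeds once the echoed estimate reaches the expected bitrate and
// covers all expected SSRCs.
inline void OnEstimateChanged(RembSink* sink,
                              unsigned int bitrate_bps,
                              const std::vector<unsigned int>& ssrcs) {
  sink->SendRemb(bitrate_bps, ssrcs);
}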
+ RtpRtcp::Configuration config; + config.receive_statistics = receive_stats_.get(); + feedback_transport_.Enable(); + config.outgoing_transport = &feedback_transport_; + rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config)); + rtp_rtcp_->SetREMBStatus(true); + rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound); + rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime, + kAbsoluteSendTimeExtensionId); + AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory; + const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000; + remote_bitrate_estimator_.reset( + rbe_factory.Create(this, clock, kRemoteBitrateEstimatorMinBitrateBps)); + } + + virtual void OnReceiveBitrateChanged(const std::vector& ssrcs, + unsigned int bitrate) { + CriticalSectionScoped lock(critical_section_.get()); + if (ssrcs.size() == num_expected_ssrcs_ && bitrate >= kExpectedBitrateBps) { + if (rtx_media_ssrcs_.empty() || rtx_media_sent_ > 0) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + webrtc::test::PrintResult( + "total-sent", "", test_info->name(), total_sent_, "bytes", false); + webrtc::test::PrintResult("padding-sent", + "", + test_info->name(), + padding_sent_, + "bytes", + false); + webrtc::test::PrintResult("rtx-media-sent", + "", + test_info->name(), + rtx_media_sent_, + "bytes", + false); + webrtc::test::PrintResult("total-packets-sent", + "", + test_info->name(), + total_packets_sent_, + "packets", + false); + webrtc::test::PrintResult("padding-packets-sent", + "", + test_info->name(), + padding_packets_sent_, + "packets", + false); + webrtc::test::PrintResult("rtx-packets-sent", + "", + test_info->name(), + rtx_media_packets_sent_, + "packets", + false); + all_ssrcs_sent_->Set(); + } + } + rtp_rtcp_->SetREMBData( + bitrate, static_cast(ssrcs.size()), &ssrcs[0]); + rtp_rtcp_->Process(); + } + + virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE { + CriticalSectionScoped lock(critical_section_.get()); + RTPHeader header; + EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast(length), &header)); + receive_stats_->IncomingPacket(header, length, false); + payload_registry_->SetIncomingPayloadType(header); + remote_bitrate_estimator_->IncomingPacket( + clock_->TimeInMilliseconds(), static_cast(length - 12), header); + if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) { + remote_bitrate_estimator_->Process(); + } + total_sent_ += length; + padding_sent_ += header.paddingLength; + ++total_packets_sent_; + if (header.paddingLength > 0) + ++padding_packets_sent_; + if (rtx_media_ssrcs_.find(header.ssrc) != rtx_media_ssrcs_.end()) { + rtx_media_sent_ += length - header.headerLength - header.paddingLength; + if (header.paddingLength == 0) + ++rtx_media_packets_sent_; + uint8_t restored_packet[kMaxPacketSize]; + uint8_t* restored_packet_ptr = restored_packet; + int restored_length = static_cast(length); + payload_registry_->RestoreOriginalPacket(&restored_packet_ptr, + packet, + &restored_length, + rtx_media_ssrcs_[header.ssrc], + header); + length = restored_length; + EXPECT_TRUE(rtp_parser_->Parse( + restored_packet, static_cast(length), &header)); + } else { + rtp_rtcp_->SetRemoteSSRC(header.ssrc); + } + return true; + } + + virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE { + return true; + } + + EventTypeWrapper Wait() { return all_ssrcs_sent_->Wait(120 * 1000); } + + private: + static const unsigned int kExpectedBitrateBps = 1200000; + + scoped_ptr critical_section_; + scoped_ptr all_ssrcs_sent_; + 
scoped_ptr rtp_parser_; + scoped_ptr rtp_rtcp_; + internal::TransportAdapter feedback_transport_; + scoped_ptr receive_stats_; + scoped_ptr payload_registry_; + scoped_ptr remote_bitrate_estimator_; + Clock* clock_; + const size_t num_expected_ssrcs_; + SsrcMap rtx_media_ssrcs_; + size_t total_sent_; + size_t padding_sent_; + size_t rtx_media_sent_; + int total_packets_sent_; + int padding_packets_sent_; + int rtx_media_packets_sent_; +}; + +class RampUpTest : public ::testing::TestWithParam { + public: + virtual void SetUp() { reserved_ssrcs_.clear(); } + + protected: + void RunRampUpTest(bool pacing, bool rtx) { + const size_t kNumberOfStreams = 3; + std::vector ssrcs; + for (size_t i = 0; i < kNumberOfStreams; ++i) + ssrcs.push_back(static_cast(i + 1)); + uint32_t kRtxSsrcs[kNumberOfStreams] = {111, 112, 113}; + StreamObserver::SsrcMap rtx_ssrc_map; + if (rtx) { + for (size_t i = 0; i < ssrcs.size(); ++i) + rtx_ssrc_map[kRtxSsrcs[i]] = ssrcs[i]; + } + test::DirectTransport receiver_transport; + int num_expected_ssrcs = kNumberOfStreams + (rtx ? 1 : 0); + StreamObserver stream_observer(num_expected_ssrcs, + rtx_ssrc_map, + &receiver_transport, + Clock::GetRealTimeClock()); + + Call::Config call_config(&stream_observer); + webrtc::Config webrtc_config; + call_config.webrtc_config = &webrtc_config; + webrtc_config.Set(new PaddingStrategy(rtx)); + scoped_ptr call(Call::Create(call_config)); + VideoSendStream::Config send_config = call->GetDefaultSendConfig(); + + receiver_transport.SetReceiver(call->Receiver()); + + test::FakeEncoder encoder(Clock::GetRealTimeClock()); + send_config.encoder = &encoder; + send_config.internal_source = false; + test::FakeEncoder::SetCodecSettings(&send_config.codec, kNumberOfStreams); + send_config.codec.plType = 125; + send_config.pacing = pacing; + send_config.rtp.nack.rtp_history_ms = 1000; + send_config.rtp.ssrcs.insert( + send_config.rtp.ssrcs.begin(), ssrcs.begin(), ssrcs.end()); + if (rtx) { + send_config.rtp.rtx.payload_type = 96; + send_config.rtp.rtx.ssrcs.insert(send_config.rtp.rtx.ssrcs.begin(), + kRtxSsrcs, + kRtxSsrcs + kNumberOfStreams); + } + send_config.rtp.extensions.push_back( + RtpExtension(RtpExtension::kAbsSendTime, kAbsoluteSendTimeExtensionId)); + + VideoSendStream* send_stream = call->CreateVideoSendStream(send_config); + + scoped_ptr frame_generator_capturer( + test::FrameGeneratorCapturer::Create(send_stream->Input(), + send_config.codec.width, + send_config.codec.height, + 30, + Clock::GetRealTimeClock())); + + send_stream->StartSending(); + frame_generator_capturer->Start(); + + EXPECT_EQ(kEventSignaled, stream_observer.Wait()); + + frame_generator_capturer->Stop(); + send_stream->StopSending(); + + call->DestroyVideoSendStream(send_stream); + } + std::map reserved_ssrcs_; +}; + +TEST_F(RampUpTest, WithoutPacing) { RunRampUpTest(false, false); } + +TEST_F(RampUpTest, WithPacing) { RunRampUpTest(true, false); } + +TEST_F(RampUpTest, WithPacingAndRtx) { RunRampUpTest(true, true); } + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/receive_statistics_proxy.cc b/media/webrtc/trunk/webrtc/video/receive_statistics_proxy.cc new file mode 100644 index 000000000000..f42e4d33a5dc --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/receive_statistics_proxy.cc @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/video/receive_statistics_proxy.h" + +#include "webrtc/system_wrappers/interface/clock.h" +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" + +namespace webrtc { +namespace internal { + +ReceiveStatisticsProxy::ReceiveStatisticsProxy(uint32_t ssrc, + Clock* clock, + ViERTP_RTCP* rtp_rtcp, + ViECodec* codec, + int channel) + : channel_(channel), + lock_(CriticalSectionWrapper::CreateCriticalSection()), + clock_(clock), + // 1000ms window, scale 1000 for ms to s. + decode_fps_estimator_(1000, 1000), + renders_fps_estimator_(1000, 1000), + codec_(codec), + rtp_rtcp_(rtp_rtcp) { + stats_.ssrc = ssrc; +} + +ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {} + +VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { + VideoReceiveStream::Stats stats; + { + CriticalSectionScoped cs(lock_.get()); + stats = stats_; + } + stats.c_name = GetCName(); + codec_->GetReceiveSideDelay(channel_, &stats.avg_delay_ms); + stats.discarded_packets = codec_->GetDiscardedPackets(channel_); + codec_->GetReceiveCodecStastistics( + channel_, stats.key_frames, stats.delta_frames); + + return stats; +} + +std::string ReceiveStatisticsProxy::GetCName() const { + char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength]; + if (rtp_rtcp_->GetRemoteRTCPCName(channel_, rtcp_cname) != 0) + rtcp_cname[0] = '\0'; + return rtcp_cname; +} + +void ReceiveStatisticsProxy::IncomingRate(const int video_channel, + const unsigned int framerate, + const unsigned int bitrate) { + CriticalSectionScoped cs(lock_.get()); + stats_.network_frame_rate = framerate; + stats_.bitrate_bps = bitrate; +} + +void ReceiveStatisticsProxy::StatisticsUpdated( + const webrtc::RtcpStatistics& statistics, + uint32_t ssrc) { + CriticalSectionScoped cs(lock_.get()); + + stats_.rtcp_stats = statistics; +} + +void ReceiveStatisticsProxy::DataCountersUpdated( + const webrtc::StreamDataCounters& counters, + uint32_t ssrc) { + CriticalSectionScoped cs(lock_.get()); + + stats_.rtp_stats = counters; +} + +void ReceiveStatisticsProxy::OnDecodedFrame() { + uint64_t now = clock_->TimeInMilliseconds(); + + CriticalSectionScoped cs(lock_.get()); + decode_fps_estimator_.Update(1, now); + stats_.decode_frame_rate = decode_fps_estimator_.Rate(now); +} + +void ReceiveStatisticsProxy::OnRenderedFrame() { + uint64_t now = clock_->TimeInMilliseconds(); + + CriticalSectionScoped cs(lock_.get()); + renders_fps_estimator_.Update(1, now); + stats_.render_frame_rate = renders_fps_estimator_.Rate(now); +} + +} // namespace internal +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/receive_statistics_proxy.h b/media/webrtc/trunk/webrtc/video/receive_statistics_proxy.h new file mode 100644 index 000000000000..bedebb3fce81 --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/receive_statistics_proxy.h @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef WEBRTC_VIDEO_RECEIVE_STATISTICS_PROXY_H_
+#define WEBRTC_VIDEO_RECEIVE_STATISTICS_PROXY_H_
+
+#include <string>
+
+#include "webrtc/common_types.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_receive_stream.h"
+#include "webrtc/video_renderer.h"
+
+namespace webrtc {
+
+class Clock;
+class CriticalSectionWrapper;
+class ViECodec;
+class ViEDecoderObserver;
+
+namespace internal {
+
+class ReceiveStatisticsProxy : public ViEDecoderObserver,
+                               public RtcpStatisticsCallback,
+                               public StreamDataCountersCallback {
+ public:
+  ReceiveStatisticsProxy(uint32_t ssrc,
+                         Clock* clock,
+                         ViERTP_RTCP* rtp_rtcp,
+                         ViECodec* codec,
+                         int channel);
+  virtual ~ReceiveStatisticsProxy();
+
+  VideoReceiveStream::Stats GetStats() const;
+
+  void OnDecodedFrame();
+  void OnRenderedFrame();
+
+  // Overrides ViEDecoderObserver.
+  virtual void IncomingCodecChanged(const int video_channel,
+                                    const VideoCodec& video_codec) OVERRIDE {}
+  virtual void IncomingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) OVERRIDE;
+  virtual void DecoderTiming(int decode_ms,
+                             int max_decode_ms,
+                             int current_delay_ms,
+                             int target_delay_ms,
+                             int jitter_buffer_ms,
+                             int min_playout_delay_ms,
+                             int render_delay_ms) OVERRIDE {}
+  virtual void RequestNewKeyFrame(const int video_channel) OVERRIDE {}
+
+  // Overrides RtcpStatisticsCallback.
+  virtual void StatisticsUpdated(const webrtc::RtcpStatistics& statistics,
+                                 uint32_t ssrc) OVERRIDE;
+
+  // Overrides StreamDataCountersCallback.
+  virtual void DataCountersUpdated(const webrtc::StreamDataCounters& counters,
+                                   uint32_t ssrc) OVERRIDE;
+
+ private:
+  std::string GetCName() const;
+
+  const int channel_;
+  scoped_ptr<CriticalSectionWrapper> lock_;
+  Clock* clock_;
+  VideoReceiveStream::Stats stats_;
+  RateStatistics decode_fps_estimator_;
+  RateStatistics renders_fps_estimator_;
+  ViECodec* codec_;
+  ViERTP_RTCP* rtp_rtcp_;
+};
+
+}  // namespace internal
+}  // namespace webrtc
+#endif  // WEBRTC_VIDEO_RECEIVE_STATISTICS_PROXY_H_
diff --git a/media/webrtc/trunk/webrtc/video/send_statistics_proxy.cc b/media/webrtc/trunk/webrtc/video/send_statistics_proxy.cc
new file mode 100644
index 000000000000..096f0a92ad41
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/send_statistics_proxy.cc
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "webrtc/video/send_statistics_proxy.h" + +#include + +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" + +namespace webrtc { + +SendStatisticsProxy::SendStatisticsProxy( + const VideoSendStream::Config& config, + SendStatisticsProxy::StatsProvider* stats_provider) + : config_(config), + lock_(CriticalSectionWrapper::CreateCriticalSection()), + stats_provider_(stats_provider) {} + +SendStatisticsProxy::~SendStatisticsProxy() {} + +void SendStatisticsProxy::OutgoingRate(const int video_channel, + const unsigned int framerate, + const unsigned int bitrate) { + CriticalSectionScoped cs(lock_.get()); + stats_.encode_frame_rate = framerate; +} + +void SendStatisticsProxy::CapturedFrameRate(const int capture_id, + const unsigned char frame_rate) { + CriticalSectionScoped cs(lock_.get()); + stats_.input_frame_rate = frame_rate; +} + +VideoSendStream::Stats SendStatisticsProxy::GetStats() const { + VideoSendStream::Stats stats; + { + CriticalSectionScoped cs(lock_.get()); + stats = stats_; + } + stats_provider_->GetSendSideDelay(&stats); + stats.c_name = stats_provider_->GetCName(); + return stats; +} + +StreamStats* SendStatisticsProxy::GetStatsEntry(uint32_t ssrc) { + std::map::iterator it = stats_.substreams.find(ssrc); + if (it != stats_.substreams.end()) + return &it->second; + + if (std::find(config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end(), ssrc) == + config_.rtp.ssrcs.end()) + return NULL; + + return &stats_.substreams[ssrc]; // Insert new entry and return ptr. +} + +void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics, + uint32_t ssrc) { + CriticalSectionScoped cs(lock_.get()); + StreamStats* stats = GetStatsEntry(ssrc); + if (stats == NULL) + return; + + stats->rtcp_stats = statistics; +} + +void SendStatisticsProxy::DataCountersUpdated( + const StreamDataCounters& counters, + uint32_t ssrc) { + CriticalSectionScoped cs(lock_.get()); + StreamStats* stats = GetStatsEntry(ssrc); + if (stats == NULL) + return; + + stats->rtp_stats = counters; +} + +void SendStatisticsProxy::Notify(const BitrateStatistics& bitrate, + uint32_t ssrc) { + CriticalSectionScoped cs(lock_.get()); + StreamStats* stats = GetStatsEntry(ssrc); + if (stats == NULL) + return; + + stats->bitrate_bps = bitrate.bitrate_bps; +} + +void SendStatisticsProxy::FrameCountUpdated(FrameType frame_type, + uint32_t frame_count, + const unsigned int ssrc) { + CriticalSectionScoped cs(lock_.get()); + StreamStats* stats = GetStatsEntry(ssrc); + if (stats == NULL) + return; + + switch (frame_type) { + case kVideoFrameDelta: + stats->delta_frames = frame_count; + break; + case kVideoFrameKey: + stats->key_frames = frame_count; + break; + case kFrameEmpty: + case kAudioFrameSpeech: + case kAudioFrameCN: + break; + } +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/send_statistics_proxy.h b/media/webrtc/trunk/webrtc/video/send_statistics_proxy.h new file mode 100644 index 000000000000..5ad4c4550d3f --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/send_statistics_proxy.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
+#define WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
+
+#include <string>
+
+#include "webrtc/common_types.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_send_stream.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class SendStatisticsProxy : public RtcpStatisticsCallback,
+                            public StreamDataCountersCallback,
+                            public BitrateStatisticsObserver,
+                            public FrameCountObserver,
+                            public ViEEncoderObserver,
+                            public ViECaptureObserver {
+ public:
+  class StatsProvider {
+   protected:
+    StatsProvider() {}
+    virtual ~StatsProvider() {}
+
+   public:
+    virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) = 0;
+    virtual std::string GetCName() = 0;
+  };
+
+  SendStatisticsProxy(const VideoSendStream::Config& config,
+                      StatsProvider* stats_provider);
+  virtual ~SendStatisticsProxy();
+
+  VideoSendStream::Stats GetStats() const;
+
+ protected:
+  // From RtcpStatisticsCallback.
+  virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+                                 uint32_t ssrc) OVERRIDE;
+  // From StreamDataCountersCallback.
+  virtual void DataCountersUpdated(const StreamDataCounters& counters,
+                                   uint32_t ssrc) OVERRIDE;
+
+  // From BitrateStatisticsObserver.
+  virtual void Notify(const BitrateStatistics& stats, uint32_t ssrc) OVERRIDE;
+
+  // From FrameCountObserver.
+  virtual void FrameCountUpdated(FrameType frame_type,
+                                 uint32_t frame_count,
+                                 const unsigned int ssrc) OVERRIDE;
+
+  // From ViEEncoderObserver.
+  virtual void OutgoingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) OVERRIDE;
+
+  virtual void SuspendChange(int video_channel, bool is_suspended) OVERRIDE {}
+
+  // From ViECaptureObserver.
+  virtual void BrightnessAlarm(const int capture_id,
+                               const Brightness brightness) OVERRIDE {}
+
+  virtual void CapturedFrameRate(const int capture_id,
+                                 const unsigned char frame_rate) OVERRIDE;
+
+  virtual void NoPictureAlarm(const int capture_id,
+                              const CaptureAlarm alarm) OVERRIDE {}
+
+ private:
+  StreamStats* GetStatsEntry(uint32_t ssrc);
+
+  const VideoSendStream::Config config_;
+  scoped_ptr<CriticalSectionWrapper> lock_;
+  VideoSendStream::Stats stats_;
+  StatsProvider* stats_provider_;
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
diff --git a/media/webrtc/trunk/webrtc/video/send_statistics_proxy_unittest.cc b/media/webrtc/trunk/webrtc/video/send_statistics_proxy_unittest.cc
new file mode 100644
index 000000000000..ed74a4f77550
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/send_statistics_proxy_unittest.cc
@@ -0,0 +1,229 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for SendStatisticsProxy.
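+// The fixture doubles as the proxy's StatsProvider so that the delay and
+// CName values surfaced through GetStats() can be controlled per test.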
+#include "webrtc/video/send_statistics_proxy.h" + +#include +#include +#include + +#include "testing/gtest/include/gtest/gtest.h" + +namespace webrtc { + +class SendStatisticsProxyTest : public ::testing::Test, + protected SendStatisticsProxy::StatsProvider { + public: + SendStatisticsProxyTest() : avg_delay_ms_(0), max_delay_ms_(0) {} + virtual ~SendStatisticsProxyTest() {} + + protected: + virtual void SetUp() { + statistics_proxy_.reset( + new SendStatisticsProxy(GetTestConfig(), this)); + config_ = GetTestConfig(); + expected_ = VideoSendStream::Stats(); + } + + VideoSendStream::Config GetTestConfig() { + VideoSendStream::Config config; + config.rtp.ssrcs.push_back(17); + config.rtp.ssrcs.push_back(42); + return config; + } + + virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) OVERRIDE { + stats->avg_delay_ms = avg_delay_ms_; + stats->max_delay_ms = max_delay_ms_; + return true; + } + + virtual std::string GetCName() { return cname_; } + + void ExpectEqual(VideoSendStream::Stats one, VideoSendStream::Stats other) { + EXPECT_EQ(one.avg_delay_ms, other.avg_delay_ms); + EXPECT_EQ(one.input_frame_rate, other.input_frame_rate); + EXPECT_EQ(one.encode_frame_rate, other.encode_frame_rate); + EXPECT_EQ(one.avg_delay_ms, other.avg_delay_ms); + EXPECT_EQ(one.max_delay_ms, other.max_delay_ms); + EXPECT_EQ(one.c_name, other.c_name); + + EXPECT_EQ(one.substreams.size(), other.substreams.size()); + for (std::map::const_iterator it = + one.substreams.begin(); + it != one.substreams.end(); + ++it) { + std::map::const_iterator corresponding_it = + other.substreams.find(it->first); + ASSERT_TRUE(corresponding_it != other.substreams.end()); + const StreamStats& a = it->second; + const StreamStats& b = corresponding_it->second; + + EXPECT_EQ(a.key_frames, b.key_frames); + EXPECT_EQ(a.delta_frames, b.delta_frames); + EXPECT_EQ(a.bitrate_bps, b.bitrate_bps); + + EXPECT_EQ(a.rtp_stats.bytes, b.rtp_stats.bytes); + EXPECT_EQ(a.rtp_stats.header_bytes, b.rtp_stats.header_bytes); + EXPECT_EQ(a.rtp_stats.padding_bytes, b.rtp_stats.padding_bytes); + EXPECT_EQ(a.rtp_stats.packets, b.rtp_stats.packets); + EXPECT_EQ(a.rtp_stats.retransmitted_packets, + b.rtp_stats.retransmitted_packets); + EXPECT_EQ(a.rtp_stats.fec_packets, b.rtp_stats.fec_packets); + + EXPECT_EQ(a.rtcp_stats.fraction_lost, b.rtcp_stats.fraction_lost); + EXPECT_EQ(a.rtcp_stats.cumulative_lost, b.rtcp_stats.cumulative_lost); + EXPECT_EQ(a.rtcp_stats.extended_max_sequence_number, + b.rtcp_stats.extended_max_sequence_number); + EXPECT_EQ(a.rtcp_stats.jitter, b.rtcp_stats.jitter); + } + } + + scoped_ptr statistics_proxy_; + VideoSendStream::Config config_; + int avg_delay_ms_; + int max_delay_ms_; + std::string cname_; + VideoSendStream::Stats expected_; + typedef std::map::const_iterator StreamIterator; +}; + +TEST_F(SendStatisticsProxyTest, RtcpStatistics) { + RtcpStatisticsCallback* callback = statistics_proxy_.get(); + for (std::vector::const_iterator it = config_.rtp.ssrcs.begin(); + it != config_.rtp.ssrcs.end(); + ++it) { + const uint32_t ssrc = *it; + StreamStats& ssrc_stats = expected_.substreams[ssrc]; + + // Add statistics with some arbitrary, but unique, numbers. 
+    uint32_t offset = ssrc * sizeof(RtcpStatistics);
+    ssrc_stats.rtcp_stats.cumulative_lost = offset;
+    ssrc_stats.rtcp_stats.extended_max_sequence_number = offset + 1;
+    ssrc_stats.rtcp_stats.fraction_lost = offset + 2;
+    ssrc_stats.rtcp_stats.jitter = offset + 3;
+    callback->StatisticsUpdated(ssrc_stats.rtcp_stats, ssrc);
+  }
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+  ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, FrameRates) {
+  const int capture_fps = 31;
+  const int encode_fps = 29;
+
+  ViECaptureObserver* capture_observer = statistics_proxy_.get();
+  capture_observer->CapturedFrameRate(0, capture_fps);
+  ViEEncoderObserver* encoder_observer = statistics_proxy_.get();
+  encoder_observer->OutgoingRate(0, encode_fps, 0);
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+  EXPECT_EQ(capture_fps, stats.input_frame_rate);
+  EXPECT_EQ(encode_fps, stats.encode_frame_rate);
+}
+
+TEST_F(SendStatisticsProxyTest, FrameCounts) {
+  FrameCountObserver* observer = statistics_proxy_.get();
+  for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+       it != config_.rtp.ssrcs.end();
+       ++it) {
+    const uint32_t ssrc = *it;
+    // Add statistics with some arbitrary, but unique, numbers.
+    StreamStats& stats = expected_.substreams[ssrc];
+    uint32_t offset = ssrc * sizeof(StreamStats);
+    stats.key_frames = offset;
+    stats.delta_frames = offset + 1;
+    observer->FrameCountUpdated(kVideoFrameKey, stats.key_frames, ssrc);
+    observer->FrameCountUpdated(kVideoFrameDelta, stats.delta_frames, ssrc);
+  }
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+  ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, DataCounters) {
+  StreamDataCountersCallback* callback = statistics_proxy_.get();
+  for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+       it != config_.rtp.ssrcs.end();
+       ++it) {
+    const uint32_t ssrc = *it;
+    StreamDataCounters& counters = expected_.substreams[ssrc].rtp_stats;
+    // Add statistics with some arbitrary, but unique, numbers.
+    uint32_t offset = ssrc * sizeof(StreamDataCounters);
+    counters.bytes = offset;
+    counters.header_bytes = offset + 1;
+    counters.fec_packets = offset + 2;
+    counters.padding_bytes = offset + 3;
+    counters.retransmitted_packets = offset + 4;
+    counters.packets = offset + 5;
+    callback->DataCountersUpdated(counters, ssrc);
+  }
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+  ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, Bitrate) {
+  BitrateStatisticsObserver* observer = statistics_proxy_.get();
+  for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+       it != config_.rtp.ssrcs.end();
+       ++it) {
+    const uint32_t ssrc = *it;
+    BitrateStatistics bitrate;
+    bitrate.bitrate_bps = ssrc;
+    observer->Notify(bitrate, ssrc);
+    expected_.substreams[ssrc].bitrate_bps = ssrc;
+  }
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+  ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, StreamStats) {
+  avg_delay_ms_ = 1;
+  max_delay_ms_ = 2;
+  cname_ = "qwertyuiop";
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+
+  EXPECT_EQ(avg_delay_ms_, stats.avg_delay_ms);
+  EXPECT_EQ(max_delay_ms_, stats.max_delay_ms);
+  EXPECT_EQ(cname_, stats.c_name);
+}
+
+TEST_F(SendStatisticsProxyTest, NoSubstreams) {
+  uint32_t excluded_ssrc =
+      *std::max_element(config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end()) + 1;
+  // From RtcpStatisticsCallback.
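+  // (The SSRC above lies outside the configured set; none of the callbacks
+  // below should create a substream entry for it.)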
+  RtcpStatistics rtcp_stats;
+  RtcpStatisticsCallback* rtcp_callback = statistics_proxy_.get();
+  rtcp_callback->StatisticsUpdated(rtcp_stats, excluded_ssrc);
+
+  // From StreamDataCountersCallback.
+  StreamDataCounters rtp_stats;
+  StreamDataCountersCallback* rtp_callback = statistics_proxy_.get();
+  rtp_callback->DataCountersUpdated(rtp_stats, excluded_ssrc);
+
+  // From BitrateStatisticsObserver.
+  BitrateStatistics bitrate;
+  BitrateStatisticsObserver* bitrate_observer = statistics_proxy_.get();
+  bitrate_observer->Notify(bitrate, excluded_ssrc);
+
+  // From FrameCountObserver.
+  FrameCountObserver* fps_observer = statistics_proxy_.get();
+  fps_observer->FrameCountUpdated(kVideoFrameKey, 1, excluded_ssrc);
+
+  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+  EXPECT_TRUE(stats.substreams.empty());
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/transport_adapter.cc b/media/webrtc/trunk/webrtc/video/transport_adapter.cc
similarity index 66%
rename from media/webrtc/trunk/webrtc/video_engine/internal/transport_adapter.cc
rename to media/webrtc/trunk/webrtc/video/transport_adapter.cc
index 635586ad1b8d..6f27d9972a59 100644
--- a/media/webrtc/trunk/webrtc/video_engine/internal/transport_adapter.cc
+++ b/media/webrtc/trunk/webrtc/video/transport_adapter.cc
@@ -8,18 +8,21 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "webrtc/video_engine/internal/transport_adapter.h"
+#include "webrtc/video/transport_adapter.h"
 
 namespace webrtc {
 namespace internal {
 
 TransportAdapter::TransportAdapter(newapi::Transport* transport)
-    : transport_(transport) {}
+    : transport_(transport), enabled_(0) {}
 
 int TransportAdapter::SendPacket(int /*channel*/,
                                  const void* packet,
                                  int length) {
-  bool success = transport_->SendRTP(static_cast<const uint8_t*>(packet),
+  if (enabled_.Value() == 0)
+    return false;
+
+  bool success = transport_->SendRtp(static_cast<const uint8_t*>(packet),
                                      static_cast<size_t>(length));
   return success ? length : -1;
 }
@@ -27,10 +30,21 @@ int TransportAdapter::SendPacket(int /*channel*/,
 int TransportAdapter::SendRTCPPacket(int /*channel*/,
                                      const void* packet,
                                      int length) {
-  bool success = transport_->SendRTCP(static_cast<const uint8_t*>(packet),
+  if (enabled_.Value() == 0)
+    return false;
+
+  bool success = transport_->SendRtcp(static_cast<const uint8_t*>(packet),
                                       static_cast<size_t>(length));
   return success ? length : -1;
 }
 
+void TransportAdapter::Enable() {
+  // If this exchange fails it means enabled_ was already true, no need to
+  // check result and iterate.
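+  // CompareExchange(new_value, compare_value) only stores new_value when the
+  // current value equals compare_value, so the flag flips 0 -> 1 here and
+  // 1 -> 0 in Disable() exactly once per transition.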
+  enabled_.CompareExchange(1, 0);
+}
+
+void TransportAdapter::Disable() { enabled_.CompareExchange(0, 1); }
+
 }  // namespace internal
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/transport_adapter.h b/media/webrtc/trunk/webrtc/video/transport_adapter.h
similarity index 88%
rename from media/webrtc/trunk/webrtc/video_engine/internal/transport_adapter.h
rename to media/webrtc/trunk/webrtc/video/transport_adapter.h
index a5e2e281bf1a..79f995be9b6c 100644
--- a/media/webrtc/trunk/webrtc/video_engine/internal/transport_adapter.h
+++ b/media/webrtc/trunk/webrtc/video/transport_adapter.h
@@ -11,7 +11,8 @@
 #define WEBRTC_VIDEO_ENGINE_INTERNAL_TRANSPORT_ADAPTER_H_
 
 #include "webrtc/common_types.h"
-#include "webrtc/video_engine/new_include/transport.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/transport.h"
 
 namespace webrtc {
 namespace internal {
@@ -25,8 +26,12 @@ class TransportAdapter : public webrtc::Transport {
   virtual int SendRTCPPacket(int /*channel*/,
                              const void* packet,
                              int length) OVERRIDE;
+  void Enable();
+  void Disable();
+
  private:
   newapi::Transport *transport_;
+  Atomic32 enabled_;
 };
 }  // namespace internal
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video/video_receive_stream.cc b/media/webrtc/trunk/webrtc/video/video_receive_stream.cc
new file mode 100644
index 000000000000..337eda4d49ed
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/video_receive_stream.cc
@@ -0,0 +1,241 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/video_receive_stream.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_external_codec.h"
+#include "webrtc/video_engine/include/vie_image_process.h"
+#include "webrtc/video_engine/include/vie_network.h"
+#include "webrtc/video_engine/include/vie_render.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_receive_stream.h"
+
+namespace webrtc {
+namespace internal {
+
+VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
+                                       const VideoReceiveStream::Config& config,
+                                       newapi::Transport* transport,
+                                       webrtc::VoiceEngine* voice_engine,
+                                       int base_channel)
+    : transport_adapter_(transport),
+      encoded_frame_proxy_(config.pre_decode_callback),
+      config_(config),
+      clock_(Clock::GetRealTimeClock()),
+      channel_(-1) {
+  video_engine_base_ = ViEBase::GetInterface(video_engine);
+  video_engine_base_->CreateReceiveChannel(channel_, base_channel);
+  assert(channel_ != -1);
+
+  rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
+  assert(rtp_rtcp_ != NULL);
+
+  // TODO(pbos): This is not fine grained enough...
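+  // rtp_history_ms is reduced to a boolean here: any positive history
+  // enables NACK for the whole channel.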
+ rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0); + rtp_rtcp_->SetKeyFrameRequestMethod(channel_, kViEKeyFrameRequestPliRtcp); + switch (config_.rtp.rtcp_mode) { + case newapi::kRtcpCompound: + rtp_rtcp_->SetRTCPStatus(channel_, kRtcpCompound_RFC4585); + break; + case newapi::kRtcpReducedSize: + rtp_rtcp_->SetRTCPStatus(channel_, kRtcpNonCompound_RFC5506); + break; + } + + assert(config_.rtp.remote_ssrc != 0); + // TODO(pbos): What's an appropriate local_ssrc for receive-only streams? + assert(config_.rtp.local_ssrc != 0); + assert(config_.rtp.remote_ssrc != config_.rtp.local_ssrc); + + rtp_rtcp_->SetLocalSSRC(channel_, config_.rtp.local_ssrc); + // TODO(pbos): Support multiple RTX, per video payload. + Config::Rtp::RtxMap::const_iterator it = config_.rtp.rtx.begin(); + if (it != config_.rtp.rtx.end()) { + assert(it->second.ssrc != 0); + assert(it->second.payload_type != 0); + + rtp_rtcp_->SetRemoteSSRCType(channel_, kViEStreamTypeRtx, it->second.ssrc); + rtp_rtcp_->SetRtxReceivePayloadType(channel_, it->second.payload_type); + } + + rtp_rtcp_->SetRembStatus(channel_, false, config_.rtp.remb); + + for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) { + const std::string& extension = config_.rtp.extensions[i].name; + int id = config_.rtp.extensions[i].id; + if (extension == RtpExtension::kTOffset) { + if (rtp_rtcp_->SetReceiveTimestampOffsetStatus(channel_, true, id) != 0) + abort(); + } else if (extension == RtpExtension::kAbsSendTime) { + if (rtp_rtcp_->SetReceiveAbsoluteSendTimeStatus(channel_, true, id) != 0) + abort(); + } else { + abort(); // Unsupported extension. + } + } + + network_ = ViENetwork::GetInterface(video_engine); + assert(network_ != NULL); + + network_->RegisterSendTransport(channel_, transport_adapter_); + + codec_ = ViECodec::GetInterface(video_engine); + + for (size_t i = 0; i < config_.codecs.size(); ++i) { + if (codec_->SetReceiveCodec(channel_, config_.codecs[i]) != 0) { + // TODO(pbos): Abort gracefully, this can be a runtime error. + // Factor out to an Init() method. + abort(); + } + } + + stats_proxy_.reset(new ReceiveStatisticsProxy( + config_.rtp.local_ssrc, clock_, rtp_rtcp_, codec_, channel_)); + + if (rtp_rtcp_->RegisterReceiveChannelRtcpStatisticsCallback( + channel_, stats_proxy_.get()) != 0) + abort(); + + if (rtp_rtcp_->RegisterReceiveChannelRtpStatisticsCallback( + channel_, stats_proxy_.get()) != 0) + abort(); + + if (codec_->RegisterDecoderObserver(channel_, *stats_proxy_) != 0) + abort(); + + external_codec_ = ViEExternalCodec::GetInterface(video_engine); + for (size_t i = 0; i < config_.external_decoders.size(); ++i) { + ExternalVideoDecoder* decoder = &config_.external_decoders[i]; + if (external_codec_->RegisterExternalReceiveCodec( + channel_, + decoder->payload_type, + decoder->decoder, + decoder->renderer, + decoder->expected_delay_ms) != 0) { + // TODO(pbos): Abort gracefully? Can this be a runtime error? 
+ abort(); + } + } + + render_ = ViERender::GetInterface(video_engine); + assert(render_ != NULL); + + render_->AddRenderCallback(channel_, this); + + if (voice_engine) { + video_engine_base_->SetVoiceEngine(voice_engine); + video_engine_base_->ConnectAudioChannel(channel_, config_.audio_channel_id); + } + + image_process_ = ViEImageProcess::GetInterface(video_engine); + if (config.pre_decode_callback) { + image_process_->RegisterPreDecodeImageCallback(channel_, + &encoded_frame_proxy_); + } + image_process_->RegisterPreRenderCallback(channel_, this); + + if (config.rtp.rtcp_xr.receiver_reference_time_report) { + rtp_rtcp_->SetRtcpXrRrtrStatus(channel_, true); + } +} + +VideoReceiveStream::~VideoReceiveStream() { + image_process_->DeRegisterPreRenderCallback(channel_); + image_process_->DeRegisterPreDecodeCallback(channel_); + + render_->RemoveRenderer(channel_); + + for (size_t i = 0; i < config_.external_decoders.size(); ++i) { + external_codec_->DeRegisterExternalReceiveCodec( + channel_, config_.external_decoders[i].payload_type); + } + + network_->DeregisterSendTransport(channel_); + + video_engine_base_->SetVoiceEngine(NULL); + image_process_->Release(); + video_engine_base_->Release(); + external_codec_->Release(); + codec_->DeregisterDecoderObserver(channel_); + rtp_rtcp_->DeregisterReceiveChannelRtpStatisticsCallback(channel_, + stats_proxy_.get()); + rtp_rtcp_->DeregisterReceiveChannelRtcpStatisticsCallback(channel_, + stats_proxy_.get()); + codec_->Release(); + network_->Release(); + render_->Release(); + rtp_rtcp_->Release(); +} + +void VideoReceiveStream::StartReceiving() { + transport_adapter_.Enable(); + if (render_->StartRender(channel_) != 0) + abort(); + if (video_engine_base_->StartReceive(channel_) != 0) + abort(); +} + +void VideoReceiveStream::StopReceiving() { + if (render_->StopRender(channel_) != 0) + abort(); + if (video_engine_base_->StopReceive(channel_) != 0) + abort(); + transport_adapter_.Disable(); +} + +VideoReceiveStream::Stats VideoReceiveStream::GetStats() const { + return stats_proxy_->GetStats(); +} + +void VideoReceiveStream::GetCurrentReceiveCodec(VideoCodec* receive_codec) { + // TODO(pbos): Implement +} + +bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { + return network_->ReceivedRTCPPacket( + channel_, packet, static_cast(length)) == 0; +} + +bool VideoReceiveStream::DeliverRtp(const uint8_t* packet, size_t length) { + return network_->ReceivedRTPPacket( + channel_, packet, static_cast(length), PacketTime()) == 0; +} + +void VideoReceiveStream::FrameCallback(I420VideoFrame* video_frame) { + stats_proxy_->OnDecodedFrame(); + + if (config_.pre_render_callback) + config_.pre_render_callback->FrameCallback(video_frame); +} + +int32_t VideoReceiveStream::RenderFrame(const uint32_t stream_id, + I420VideoFrame& video_frame) { + if (config_.renderer != NULL) + config_.renderer->RenderFrame( + video_frame, + video_frame.render_time_ms() - clock_->TimeInMilliseconds()); + + stats_proxy_->OnRenderedFrame(); + + return 0; +} +} // namespace internal +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/video_receive_stream.h b/media/webrtc/trunk/webrtc/video/video_receive_stream.h similarity index 53% rename from media/webrtc/trunk/webrtc/video_engine/internal/video_receive_stream.h rename to media/webrtc/trunk/webrtc/video/video_receive_stream.h index b6b53190e477..14653de401d2 100644 --- a/media/webrtc/trunk/webrtc/video_engine/internal/video_receive_stream.h +++ 
b/media/webrtc/trunk/webrtc/video/video_receive_stream.h
@@ -8,16 +8,20 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_VIDEO_ENGINE_VIDEO_RECEIVE_STREAM_IMPL_H_
-#define WEBRTC_VIDEO_ENGINE_VIDEO_RECEIVE_STREAM_IMPL_H_
+#ifndef WEBRTC_VIDEO_VIDEO_RECEIVE_STREAM_H_
+#define WEBRTC_VIDEO_VIDEO_RECEIVE_STREAM_H_
 
 #include <vector>
 
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/include/video_render_defines.h"
 #include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/video/encoded_frame_callback_adapter.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video/transport_adapter.h"
 #include "webrtc/video_engine/include/vie_render.h"
-#include "webrtc/video_engine/internal/transport_adapter.h"
-#include "webrtc/video_engine/new_include/video_receive_stream.h"
+#include "webrtc/video_receive_stream.h"
 
 namespace webrtc {
@@ -25,31 +29,38 @@ class VideoEngine;
 class ViEBase;
 class ViECodec;
 class ViEExternalCodec;
+class ViEImageProcess;
 class ViENetwork;
 class ViERender;
 class ViERTP_RTCP;
+class VoiceEngine;
 
 namespace internal {
 
 class VideoReceiveStream : public webrtc::VideoReceiveStream,
-                           public webrtc::ExternalRenderer {
+                           public I420FrameCallback,
+                           public VideoRenderCallback {
+ public:
   VideoReceiveStream(webrtc::VideoEngine* video_engine,
                      const VideoReceiveStream::Config& config,
-                     newapi::Transport* transport);
+                     newapi::Transport* transport,
+                     webrtc::VoiceEngine* voice_engine,
+                     int base_channel);
   virtual ~VideoReceiveStream();
 
-  virtual void StartReceive() OVERRIDE;
-  virtual void StopReceive() OVERRIDE;
+  virtual void StartReceiving() OVERRIDE;
+  virtual void StopReceiving() OVERRIDE;
+  virtual Stats GetStats() const OVERRIDE;
   virtual void GetCurrentReceiveCodec(VideoCodec* receive_codec) OVERRIDE;
 
-  virtual int FrameSizeChange(unsigned int width, unsigned int height,
-                              unsigned int /*number_of_streams*/) OVERRIDE;
-  virtual int DeliverFrame(uint8_t* frame, int buffer_size, uint32_t timestamp,
-                           int64_t render_time, void* /*handle*/) OVERRIDE;
+  // Overrides I420FrameCallback.
+  virtual void FrameCallback(I420VideoFrame* video_frame) OVERRIDE;
 
-  virtual bool IsTextureSupported() OVERRIDE;
+  // Overrides VideoRenderCallback.
+  virtual int32_t RenderFrame(const uint32_t stream_id,
+                              I420VideoFrame& video_frame) OVERRIDE;
 
  public:
   virtual bool DeliverRtcp(const uint8_t* packet, size_t length);
@@ -57,6 +68,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
 
  private:
   TransportAdapter transport_adapter_;
+  EncodedFrameCallbackAdapter encoded_frame_proxy_;
   VideoReceiveStream::Config config_;
   Clock* clock_;
@@ -66,14 +78,13 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
   ViENetwork* network_;
   ViERender* render_;
   ViERTP_RTCP* rtp_rtcp_;
+  ViEImageProcess* image_process_;
+
+  scoped_ptr<ReceiveStatisticsProxy> stats_proxy_;
 
   int channel_;
-
-  // TODO(pbos): Remove VideoReceiveStream can operate on I420 frames directly.
- unsigned int height_; - unsigned int width_; }; -} // internal -} // webrtc +} // namespace internal +} // namespace webrtc -#endif // WEBRTC_VIDEO_ENGINE_INTERNAL_VIDEO_RECEIVE_STREAM_H_ +#endif // WEBRTC_VIDEO_VIDEO_RECEIVE_STREAM_H_ diff --git a/media/webrtc/trunk/webrtc/video/video_send_stream.cc b/media/webrtc/trunk/webrtc/video/video_send_stream.cc new file mode 100644 index 000000000000..13839152abea --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/video_send_stream.cc @@ -0,0 +1,281 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/video/video_send_stream.h" + +#include +#include + +#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" +#include "webrtc/video_engine/include/vie_base.h" +#include "webrtc/video_engine/include/vie_capture.h" +#include "webrtc/video_engine/include/vie_codec.h" +#include "webrtc/video_engine/include/vie_external_codec.h" +#include "webrtc/video_engine/include/vie_image_process.h" +#include "webrtc/video_engine/include/vie_network.h" +#include "webrtc/video_engine/include/vie_rtp_rtcp.h" +#include "webrtc/video_send_stream.h" + +namespace webrtc { +namespace internal { + +VideoSendStream::VideoSendStream(newapi::Transport* transport, + CpuOveruseObserver* overuse_observer, + webrtc::VideoEngine* video_engine, + const VideoSendStream::Config& config, + int base_channel) + : transport_adapter_(transport), + encoded_frame_proxy_(config.post_encode_callback), + codec_lock_(CriticalSectionWrapper::CreateCriticalSection()), + config_(config), + external_codec_(NULL), + channel_(-1) { + video_engine_base_ = ViEBase::GetInterface(video_engine); + video_engine_base_->CreateChannel(channel_, base_channel); + assert(channel_ != -1); + + rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine); + assert(rtp_rtcp_ != NULL); + + assert(config_.rtp.ssrcs.size() > 0); + if (config_.suspend_below_min_bitrate) + config_.pacing = true; + rtp_rtcp_->SetTransmissionSmoothingStatus(channel_, config_.pacing); + + for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) { + const std::string& extension = config_.rtp.extensions[i].name; + int id = config_.rtp.extensions[i].id; + if (extension == RtpExtension::kTOffset) { + if (rtp_rtcp_->SetSendTimestampOffsetStatus(channel_, true, id) != 0) + abort(); + } else if (extension == RtpExtension::kAbsSendTime) { + if (rtp_rtcp_->SetSendAbsoluteSendTimeStatus(channel_, true, id) != 0) + abort(); + } else { + abort(); // Unsupported extension. + } + } + + rtp_rtcp_->SetRembStatus(channel_, true, false); + + // Enable NACK, FEC or both. 
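+  // In this config a red_payload_type of -1 means FEC is disabled; when FEC
+  // and an RTP history (NACK) are both configured, hybrid NACK/FEC is used.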
+ if (config_.rtp.fec.red_payload_type != -1) { + assert(config_.rtp.fec.ulpfec_payload_type != -1); + if (config_.rtp.nack.rtp_history_ms > 0) { + rtp_rtcp_->SetHybridNACKFECStatus( + channel_, + true, + static_cast(config_.rtp.fec.red_payload_type), + static_cast(config_.rtp.fec.ulpfec_payload_type)); + } else { + rtp_rtcp_->SetFECStatus( + channel_, + true, + static_cast(config_.rtp.fec.red_payload_type), + static_cast(config_.rtp.fec.ulpfec_payload_type)); + } + } else { + rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0); + } + + char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength]; + assert(config_.rtp.c_name.length() < ViERTP_RTCP::KMaxRTCPCNameLength); + strncpy(rtcp_cname, config_.rtp.c_name.c_str(), sizeof(rtcp_cname) - 1); + rtcp_cname[sizeof(rtcp_cname) - 1] = '\0'; + + rtp_rtcp_->SetRTCPCName(channel_, rtcp_cname); + + capture_ = ViECapture::GetInterface(video_engine); + capture_->AllocateExternalCaptureDevice(capture_id_, external_capture_); + capture_->ConnectCaptureDevice(capture_id_, channel_); + + network_ = ViENetwork::GetInterface(video_engine); + assert(network_ != NULL); + + network_->RegisterSendTransport(channel_, transport_adapter_); + // 28 to match packet overhead in ModuleRtpRtcpImpl. + network_->SetMTU(channel_, + static_cast(config_.rtp.max_packet_size + 28)); + + if (config.encoder) { + external_codec_ = ViEExternalCodec::GetInterface(video_engine); + if (external_codec_->RegisterExternalSendCodec(channel_, + config.codec.plType, + config.encoder, + config.internal_source) != + 0) { + abort(); + } + } + + codec_ = ViECodec::GetInterface(video_engine); + if (!SetCodec(config_.codec)) + abort(); + + if (overuse_observer) { + video_engine_base_->RegisterCpuOveruseObserver(channel_, overuse_observer); + } + + image_process_ = ViEImageProcess::GetInterface(video_engine); + image_process_->RegisterPreEncodeCallback(channel_, + config_.pre_encode_callback); + if (config_.post_encode_callback) { + image_process_->RegisterPostEncodeImageCallback(channel_, + &encoded_frame_proxy_); + } + + if (config.suspend_below_min_bitrate) { + codec_->SuspendBelowMinBitrate(channel_); + } + + stats_proxy_.reset(new SendStatisticsProxy(config, this)); + + rtp_rtcp_->RegisterSendChannelRtcpStatisticsCallback(channel_, + stats_proxy_.get()); + rtp_rtcp_->RegisterSendChannelRtpStatisticsCallback(channel_, + stats_proxy_.get()); + rtp_rtcp_->RegisterSendBitrateObserver(channel_, stats_proxy_.get()); + rtp_rtcp_->RegisterSendFrameCountObserver(channel_, stats_proxy_.get()); + + codec_->RegisterEncoderObserver(channel_, *stats_proxy_); + capture_->RegisterObserver(capture_id_, *stats_proxy_); +} + +VideoSendStream::~VideoSendStream() { + capture_->DeregisterObserver(capture_id_); + codec_->DeregisterEncoderObserver(channel_); + + rtp_rtcp_->DeregisterSendFrameCountObserver(channel_, stats_proxy_.get()); + rtp_rtcp_->DeregisterSendBitrateObserver(channel_, stats_proxy_.get()); + rtp_rtcp_->DeregisterSendChannelRtpStatisticsCallback(channel_, + stats_proxy_.get()); + rtp_rtcp_->DeregisterSendChannelRtcpStatisticsCallback(channel_, + stats_proxy_.get()); + + image_process_->DeRegisterPreEncodeCallback(channel_); + + network_->DeregisterSendTransport(channel_); + + capture_->DisconnectCaptureDevice(channel_); + capture_->ReleaseCaptureDevice(capture_id_); + + if (external_codec_) { + external_codec_->DeRegisterExternalSendCodec(channel_, + config_.codec.plType); + } + + video_engine_base_->DeleteChannel(channel_); + + image_process_->Release(); + 
video_engine_base_->Release(); + capture_->Release(); + codec_->Release(); + if (external_codec_) + external_codec_->Release(); + network_->Release(); + rtp_rtcp_->Release(); +} + +void VideoSendStream::PutFrame(const I420VideoFrame& frame) { + input_frame_.CopyFrame(frame); + SwapFrame(&input_frame_); +} + +void VideoSendStream::SwapFrame(I420VideoFrame* frame) { + // TODO(pbos): Warn if frame is "too far" into the future, or too old. This + // would help detect if frame's being used without NTP. + // TO REVIEWER: Is there any good check for this? Should it be + // skipped? + if (frame != &input_frame_) + input_frame_.SwapFrame(frame); + + // TODO(pbos): Local rendering should not be done on the capture thread. + if (config_.local_renderer != NULL) + config_.local_renderer->RenderFrame(input_frame_, 0); + + external_capture_->SwapFrame(&input_frame_); +} + +VideoSendStreamInput* VideoSendStream::Input() { return this; } + +void VideoSendStream::StartSending() { + transport_adapter_.Enable(); + video_engine_base_->StartSend(channel_); + video_engine_base_->StartReceive(channel_); +} + +void VideoSendStream::StopSending() { + video_engine_base_->StopSend(channel_); + video_engine_base_->StopReceive(channel_); + transport_adapter_.Disable(); +} + +bool VideoSendStream::SetCodec(const VideoCodec& codec) { + assert(config_.rtp.ssrcs.size() >= codec.numberOfSimulcastStreams); + + CriticalSectionScoped crit(codec_lock_.get()); + if (codec_->SetSendCodec(channel_, codec) != 0) + return false; + + for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { + rtp_rtcp_->SetLocalSSRC(channel_, + config_.rtp.ssrcs[i], + kViEStreamTypeNormal, + static_cast(i)); + } + + if (&config_.codec != &codec) + config_.codec = codec; + + if (config_.rtp.rtx.ssrcs.empty()) + return true; + + // Set up RTX. + assert(config_.rtp.rtx.ssrcs.size() == config_.rtp.ssrcs.size()); + for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { + rtp_rtcp_->SetLocalSSRC(channel_, + config_.rtp.rtx.ssrcs[i], + kViEStreamTypeRtx, + static_cast(i)); + } + + if (config_.rtp.rtx.payload_type != 0) + rtp_rtcp_->SetRtxSendPayloadType(channel_, config_.rtp.rtx.payload_type); + + return true; +} + +VideoCodec VideoSendStream::GetCodec() { + CriticalSectionScoped crit(codec_lock_.get()); + return config_.codec; +} + +bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { + return network_->ReceivedRTCPPacket( + channel_, packet, static_cast(length)) == 0; +} + +VideoSendStream::Stats VideoSendStream::GetStats() const { + return stats_proxy_->GetStats(); +} + +bool VideoSendStream::GetSendSideDelay(VideoSendStream::Stats* stats) { + return codec_->GetSendSideDelay( + channel_, &stats->avg_delay_ms, &stats->max_delay_ms); +} + +std::string VideoSendStream::GetCName() { + char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength]; + rtp_rtcp_->GetRTCPCName(channel_, rtcp_cname); + return rtcp_cname; +} + +} // namespace internal +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video/video_send_stream.h b/media/webrtc/trunk/webrtc/video/video_send_stream.h new file mode 100644 index 000000000000..3ea4fbfb60be --- /dev/null +++ b/media/webrtc/trunk/webrtc/video/video_send_stream.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
+#define WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/video/encoded_frame_callback_adapter.h"
+#include "webrtc/video/send_statistics_proxy.h"
+#include "webrtc/video/transport_adapter.h"
+#include "webrtc/video_receive_stream.h"
+#include "webrtc/video_send_stream.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+
+namespace webrtc {
+
+class CpuOveruseObserver;
+class VideoEngine;
+class ViEBase;
+class ViECapture;
+class ViECodec;
+class ViEExternalCapture;
+class ViEExternalCodec;
+class ViEImageProcess;
+class ViENetwork;
+class ViERTP_RTCP;
+
+namespace internal {
+
+class VideoSendStream : public webrtc::VideoSendStream,
+                        public VideoSendStreamInput,
+                        public SendStatisticsProxy::StatsProvider {
+ public:
+  VideoSendStream(newapi::Transport* transport,
+                  CpuOveruseObserver* overuse_observer,
+                  webrtc::VideoEngine* video_engine,
+                  const VideoSendStream::Config& config,
+                  int base_channel);
+
+  virtual ~VideoSendStream();
+
+  virtual void StartSending() OVERRIDE;
+
+  virtual void StopSending() OVERRIDE;
+
+  virtual bool SetCodec(const VideoCodec& codec) OVERRIDE;
+  virtual VideoCodec GetCodec() OVERRIDE;
+
+  virtual Stats GetStats() const OVERRIDE;
+
+  bool DeliverRtcp(const uint8_t* packet, size_t length);
+
+  // From VideoSendStreamInput.
+  virtual void PutFrame(const I420VideoFrame& frame) OVERRIDE;
+  virtual void SwapFrame(I420VideoFrame* frame) OVERRIDE;
+
+  // From webrtc::VideoSendStream.
+  virtual VideoSendStreamInput* Input() OVERRIDE;
+
+ protected:
+  // From SendStatisticsProxy::StatsProvider.
+  virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) OVERRIDE;
+  virtual std::string GetCName() OVERRIDE;
+
+ private:
+  I420VideoFrame input_frame_;
+  TransportAdapter transport_adapter_;
+  EncodedFrameCallbackAdapter encoded_frame_proxy_;
+  scoped_ptr<CriticalSectionWrapper> codec_lock_;
+  VideoSendStream::Config config_;
+
+  ViEBase* video_engine_base_;
+  ViECapture* capture_;
+  ViECodec* codec_;
+  ViEExternalCapture* external_capture_;
+  ViEExternalCodec* external_codec_;
+  ViENetwork* network_;
+  ViERTP_RTCP* rtp_rtcp_;
+  ViEImageProcess* image_process_;
+
+  int channel_;
+  int capture_id_;
+
+  scoped_ptr<SendStatisticsProxy> stats_proxy_;
+};
+}  // namespace internal
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
diff --git a/media/webrtc/trunk/webrtc/video/video_send_stream_tests.cc b/media/webrtc/trunk/webrtc/video/video_send_stream_tests.cc
new file mode 100644
index 000000000000..f0c190ea3f92
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/video_send_stream_tests.cc
@@ -0,0 +1,1145 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <algorithm>  // max
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/common_video/interface/i420_video_frame.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/system_wrappers/interface/thread_wrapper.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/test/configurable_frame_size_encoder.h"
+#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/null_transport.h"
+#include "webrtc/test/rtp_rtcp_observer.h"
+#include "webrtc/video/transport_adapter.h"
+#include "webrtc/video_send_stream.h"
+
+namespace webrtc {
+
+enum VideoFormat { kGeneric, kVP8, };
+
+class VideoSendStreamTest : public ::testing::Test {
+ public:
+  VideoSendStreamTest()
+      : send_stream_(NULL), fake_encoder_(Clock::GetRealTimeClock()) {}
+
+ protected:
+  void RunSendTest(Call* call,
+                   const VideoSendStream::Config& config,
+                   test::RtpRtcpObserver* observer) {
+    send_stream_ = call->CreateVideoSendStream(config);
+    scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+        test::FrameGeneratorCapturer::Create(
+            send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+    send_stream_->StartSending();
+    frame_generator_capturer->Start();
+
+    EXPECT_EQ(kEventSignaled, observer->Wait());
+
+    observer->StopSending();
+    frame_generator_capturer->Stop();
+    send_stream_->StopSending();
+    call->DestroyVideoSendStream(send_stream_);
+  }
+
+  VideoSendStream::Config GetSendTestConfig(Call* call,
+                                            size_t number_of_streams) {
+    assert(number_of_streams <= kNumSendSsrcs);
+    VideoSendStream::Config config = call->GetDefaultSendConfig();
+    config.encoder = &fake_encoder_;
+    config.internal_source = false;
+    for (size_t i = 0; i < number_of_streams; ++i)
+      config.rtp.ssrcs.push_back(kSendSsrcs[i]);
+    config.pacing = true;
+    test::FakeEncoder::SetCodecSettings(&config.codec, number_of_streams);
+    config.codec.plType = kFakeSendPayloadType;
+    return config;
+  }
+
+  void TestNackRetransmission(uint32_t retransmit_ssrc,
+                              uint8_t retransmit_payload_type,
+                              bool enable_pacing);
+
+  void TestPacketFragmentationSize(VideoFormat format, bool with_fec);
+
+  void SendsSetSsrcs(size_t num_ssrcs, bool send_single_ssrc_first);
+
+  enum { kNumSendSsrcs = 3 };
+  static const uint8_t kSendPayloadType;
+  static const uint8_t kSendRtxPayloadType;
+  static const uint8_t kFakeSendPayloadType;
+  static const uint32_t kSendSsrc;
+  static const uint32_t kSendRtxSsrc;
+  static const uint32_t kSendSsrcs[kNumSendSsrcs];
+
+  VideoSendStream* send_stream_;
+  test::FakeEncoder fake_encoder_;
+};
+
+const uint8_t VideoSendStreamTest::kSendPayloadType = 100;
+const uint8_t VideoSendStreamTest::kFakeSendPayloadType = 125;
+const uint8_t VideoSendStreamTest::kSendRtxPayloadType = 98;
+const uint32_t VideoSendStreamTest::kSendRtxSsrc = 0xBADCAFE;
+const uint32_t VideoSendStreamTest::kSendSsrcs[kNumSendSsrcs] = {
+    0xC0FFED, 0xC0FFEE, 0xC0FFEF};
+const uint32_t VideoSendStreamTest::kSendSsrc =
+    VideoSendStreamTest::kSendSsrcs[0];
+
+void VideoSendStreamTest::SendsSetSsrcs(size_t num_ssrcs,
+                                        bool send_single_ssrc_first) {
+  class 
SendSsrcObserver : public test::RtpRtcpObserver { + public: + SendSsrcObserver(const uint32_t* ssrcs, + size_t num_ssrcs, + bool send_single_ssrc_first) + : RtpRtcpObserver(30 * 1000), + ssrcs_to_observe_(num_ssrcs), + expect_single_ssrc_(send_single_ssrc_first) { + for (size_t i = 0; i < num_ssrcs; ++i) + valid_ssrcs_[ssrcs[i]] = true; + } + + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + // TODO(pbos): Reenable this part of the test when #1695 is resolved and + // all SSRCs are allocated on startup. This test was observed + // to fail on TSan as the codec gets set before the SSRCs are + // set up and some frames are sent on a random-generated SSRC + // before the correct SSRC gets set. + // EXPECT_TRUE(valid_ssrcs_[header.ssrc]) + // << "Received unknown SSRC: " << header.ssrc; + // + // if (!valid_ssrcs_[header.ssrc]) + // observation_complete_->Set(); + + if (!is_observed_[header.ssrc]) { + is_observed_[header.ssrc] = true; + --ssrcs_to_observe_; + if (expect_single_ssrc_) { + expect_single_ssrc_ = false; + observation_complete_->Set(); + } + } + + if (ssrcs_to_observe_ == 0) + observation_complete_->Set(); + + return SEND_PACKET; + } + + private: + std::map valid_ssrcs_; + std::map is_observed_; + size_t ssrcs_to_observe_; + bool expect_single_ssrc_; + } observer(kSendSsrcs, num_ssrcs, send_single_ssrc_first); + + Call::Config call_config(observer.SendTransport()); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config send_config = + GetSendTestConfig(call.get(), num_ssrcs); + + if (num_ssrcs > 1) { + // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. + for (size_t i = 0; i < num_ssrcs; ++i) { + send_config.codec.simulcastStream[i].minBitrate = 10; + send_config.codec.simulcastStream[i].targetBitrate = 10; + send_config.codec.simulcastStream[i].maxBitrate = 10; + } + } + + if (send_single_ssrc_first) + send_config.codec.numberOfSimulcastStreams = 1; + + send_stream_ = call->CreateVideoSendStream(send_config); + scoped_ptr frame_generator_capturer( + test::FrameGeneratorCapturer::Create( + send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock())); + send_stream_->StartSending(); + frame_generator_capturer->Start(); + + EXPECT_EQ(kEventSignaled, observer.Wait()) + << "Timed out while waiting for " + << (send_single_ssrc_first ? "first SSRC." : "SSRCs."); + + if (send_single_ssrc_first) { + // Set full simulcast and continue with the rest of the SSRCs. 
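+    // Reconfiguring the codec on the live stream should make traffic appear
+    // on the remaining simulcast SSRCs, which the second Wait() verifies.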
+ send_config.codec.numberOfSimulcastStreams = + static_cast(num_ssrcs); + send_stream_->SetCodec(send_config.codec); + EXPECT_EQ(kEventSignaled, observer.Wait()) + << "Timed out while waiting on additional SSRCs."; + } + + observer.StopSending(); + frame_generator_capturer->Stop(); + send_stream_->StopSending(); + call->DestroyVideoSendStream(send_stream_); +} + +TEST_F(VideoSendStreamTest, CanStartStartedStream) { + test::NullTransport transport; + Call::Config call_config(&transport); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config config = GetSendTestConfig(call.get(), 1); + VideoSendStream* stream = call->CreateVideoSendStream(config); + stream->StartSending(); + stream->StartSending(); + call->DestroyVideoSendStream(stream); +} + +TEST_F(VideoSendStreamTest, CanStopStoppedStream) { + test::NullTransport transport; + Call::Config call_config(&transport); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config config = GetSendTestConfig(call.get(), 1); + VideoSendStream* stream = call->CreateVideoSendStream(config); + stream->StopSending(); + stream->StopSending(); + call->DestroyVideoSendStream(stream); +} + +TEST_F(VideoSendStreamTest, SendsSetSsrc) { SendsSetSsrcs(1, false); } + +TEST_F(VideoSendStreamTest, SendsSetSimulcastSsrcs) { + SendsSetSsrcs(kNumSendSsrcs, false); +} + +TEST_F(VideoSendStreamTest, CanSwitchToUseAllSsrcs) { + SendsSetSsrcs(kNumSendSsrcs, true); +} + +TEST_F(VideoSendStreamTest, SupportsCName) { + static std::string kCName = "PjQatC14dGfbVwGPUOA9IH7RlsFDbWl4AhXEiDsBizo="; + class CNameObserver : public test::RtpRtcpObserver { + public: + CNameObserver() : RtpRtcpObserver(30 * 1000) {} + + virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE { + RTCPUtility::RTCPParserV2 parser(packet, length, true); + EXPECT_TRUE(parser.IsValid()); + + RTCPUtility::RTCPPacketTypes packet_type = parser.Begin(); + while (packet_type != RTCPUtility::kRtcpNotValidCode) { + if (packet_type == RTCPUtility::kRtcpSdesChunkCode) { + EXPECT_EQ(parser.Packet().CName.CName, kCName); + observation_complete_->Set(); + } + + packet_type = parser.Iterate(); + } + + return SEND_PACKET; + } + } observer; + + Call::Config call_config(observer.SendTransport()); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1); + send_config.rtp.c_name = kCName; + + RunSendTest(call.get(), send_config, &observer); +} + +TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) { + static const uint8_t kAbsSendTimeExtensionId = 13; + class AbsoluteSendTimeObserver : public test::RtpRtcpObserver { + public: + AbsoluteSendTimeObserver() : RtpRtcpObserver(30 * 1000) { + EXPECT_TRUE(parser_->RegisterRtpHeaderExtension( + kRtpExtensionAbsoluteSendTime, kAbsSendTimeExtensionId)); + } + + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + EXPECT_FALSE(header.extension.hasTransmissionTimeOffset); + EXPECT_TRUE(header.extension.hasAbsoluteSendTime); + EXPECT_EQ(header.extension.transmissionTimeOffset, 0); + EXPECT_GT(header.extension.absoluteSendTime, 0u); + observation_complete_->Set(); + + return SEND_PACKET; + } + } observer; + + Call::Config call_config(observer.SendTransport()); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1); + send_config.rtp.extensions.push_back( + 
RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId)); + + RunSendTest(call.get(), send_config, &observer); +} + +TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) { + static const uint8_t kTOffsetExtensionId = 13; + class DelayedEncoder : public test::FakeEncoder { + public: + explicit DelayedEncoder(Clock* clock) : test::FakeEncoder(clock) {} + virtual int32_t Encode(const I420VideoFrame& input_image, + const CodecSpecificInfo* codec_specific_info, + const std::vector* frame_types) + OVERRIDE { + // A delay needs to be introduced to assure that we get a timestamp + // offset. + SleepMs(5); + return FakeEncoder::Encode(input_image, codec_specific_info, frame_types); + } + } encoder(Clock::GetRealTimeClock()); + + class TransmissionTimeOffsetObserver : public test::RtpRtcpObserver { + public: + TransmissionTimeOffsetObserver() : RtpRtcpObserver(30 * 1000) { + EXPECT_TRUE(parser_->RegisterRtpHeaderExtension( + kRtpExtensionTransmissionTimeOffset, kTOffsetExtensionId)); + } + + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + EXPECT_TRUE(header.extension.hasTransmissionTimeOffset); + EXPECT_FALSE(header.extension.hasAbsoluteSendTime); + EXPECT_GT(header.extension.transmissionTimeOffset, 0); + EXPECT_EQ(header.extension.absoluteSendTime, 0u); + observation_complete_->Set(); + + return SEND_PACKET; + } + } observer; + + Call::Config call_config(observer.SendTransport()); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1); + send_config.encoder = &encoder; + send_config.rtp.extensions.push_back( + RtpExtension(RtpExtension::kTOffset, kTOffsetExtensionId)); + + RunSendTest(call.get(), send_config, &observer); +} + +class FakeReceiveStatistics : public NullReceiveStatistics { + public: + FakeReceiveStatistics(uint32_t send_ssrc, + uint32_t last_sequence_number, + uint32_t cumulative_lost, + uint8_t fraction_lost) + : lossy_stats_(new LossyStatistician(last_sequence_number, + cumulative_lost, + fraction_lost)) { + stats_map_[send_ssrc] = lossy_stats_.get(); + } + + virtual StatisticianMap GetActiveStatisticians() const OVERRIDE { + return stats_map_; + } + + virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE { + return lossy_stats_.get(); + } + + private: + class LossyStatistician : public StreamStatistician { + public: + LossyStatistician(uint32_t extended_max_sequence_number, + uint32_t cumulative_lost, + uint8_t fraction_lost) { + stats_.fraction_lost = fraction_lost; + stats_.cumulative_lost = cumulative_lost; + stats_.extended_max_sequence_number = extended_max_sequence_number; + } + virtual bool GetStatistics(RtcpStatistics* statistics, + bool reset) OVERRIDE { + *statistics = stats_; + return true; + } + virtual void GetDataCounters(uint32_t* bytes_received, + uint32_t* packets_received) const OVERRIDE { + *bytes_received = 0; + *packets_received = 0; + } + virtual uint32_t BitrateReceived() const OVERRIDE { return 0; } + virtual void ResetStatistics() OVERRIDE {} + virtual bool IsRetransmitOfOldPacket(const RTPHeader& header, + int min_rtt) const OVERRIDE { + return false; + } + + virtual bool IsPacketInOrder(uint16_t sequence_number) const OVERRIDE { + return true; + } + + RtcpStatistics stats_; + }; + + scoped_ptr lossy_stats_; + StatisticianMap stats_map_; +}; + +TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) { + static const size_t kWidth = 320; + 
static const size_t kHeight = 240; + + test::NullTransport transport; + Call::Config call_config(&transport); + scoped_ptr call(Call::Create(call_config)); + + VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1); + VideoSendStream* video_send_stream = call->CreateVideoSendStream(send_config); + video_send_stream->StartSending(); + + I420VideoFrame frame; + frame.CreateEmptyFrame( + kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2); + uint8_t* old_y_buffer = frame.buffer(kYPlane); + + video_send_stream->Input()->SwapFrame(&frame); + + EXPECT_NE(frame.buffer(kYPlane), old_y_buffer); + + call->DestroyVideoSendStream(video_send_stream); +} + +TEST_F(VideoSendStreamTest, SupportsFec) { + static const int kRedPayloadType = 118; + static const int kUlpfecPayloadType = 119; + class FecObserver : public test::RtpRtcpObserver { + public: + FecObserver() + : RtpRtcpObserver(30 * 1000), + transport_adapter_(SendTransport()), + send_count_(0), + received_media_(false), + received_fec_(false) { + transport_adapter_.Enable(); + } + + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + // Send lossy receive reports to trigger FEC enabling. + if (send_count_++ % 2 != 0) { + // Receive statistics reporting having lost 50% of the packets. + FakeReceiveStatistics lossy_receive_stats( + kSendSsrc, header.sequenceNumber, send_count_ / 2, 127); + RTCPSender rtcp_sender( + 0, false, Clock::GetRealTimeClock(), &lossy_receive_stats); + EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_)); + + rtcp_sender.SetRTCPStatus(kRtcpNonCompound); + rtcp_sender.SetRemoteSSRC(kSendSsrc); + + RTCPSender::FeedbackState feedback_state; + + EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr)); + } + + EXPECT_EQ(kRedPayloadType, header.payloadType); + + uint8_t encapsulated_payload_type = packet[header.headerLength]; + + if (encapsulated_payload_type == kUlpfecPayloadType) { + received_fec_ = true; + } else { + received_media_ = true; + } + + if (received_media_ && received_fec_) + observation_complete_->Set(); + + return SEND_PACKET; + } + + private: + internal::TransportAdapter transport_adapter_; + int send_count_; + bool received_media_; + bool received_fec_; + } observer; + + Call::Config call_config(observer.SendTransport()); + scoped_ptr call(Call::Create(call_config)); + + observer.SetReceivers(call->Receiver(), NULL); + + VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1); + send_config.rtp.fec.red_payload_type = kRedPayloadType; + send_config.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; + + RunSendTest(call.get(), send_config, &observer); +} + +void VideoSendStreamTest::TestNackRetransmission( + uint32_t retransmit_ssrc, + uint8_t retransmit_payload_type, + bool enable_pacing) { + class NackObserver : public test::RtpRtcpObserver { + public: + explicit NackObserver(uint32_t retransmit_ssrc, + uint8_t retransmit_payload_type) + : RtpRtcpObserver(30 * 1000), + transport_adapter_(SendTransport()), + send_count_(0), + retransmit_ssrc_(retransmit_ssrc), + retransmit_payload_type_(retransmit_payload_type), + nacked_sequence_number_(-1) { + transport_adapter_.Enable(); + } + + virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE { + RTPHeader header; + EXPECT_TRUE(parser_->Parse(packet, static_cast(length), &header)); + + // Nack second packet after receiving the third one. 
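+    // The NACK below targets header.sequenceNumber - 1, i.e. the packet
+    // sent immediately before the one that triggered this callback.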
+ if (++send_count_ == 3) { + uint16_t nack_sequence_number = header.sequenceNumber - 1; + nacked_sequence_number_ = nack_sequence_number; + NullReceiveStatistics null_stats; + RTCPSender rtcp_sender( + 0, false, Clock::GetRealTimeClock(), &null_stats); + EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_)); + + rtcp_sender.SetRTCPStatus(kRtcpNonCompound); + rtcp_sender.SetRemoteSSRC(kSendSsrc); + + RTCPSender::FeedbackState feedback_state; + + EXPECT_EQ(0, + rtcp_sender.SendRTCP( + feedback_state, kRtcpNack, 1, &nack_sequence_number)); + } + + uint16_t sequence_number = header.sequenceNumber; + + if (header.ssrc == retransmit_ssrc_ && retransmit_ssrc_ != kSendSsrc) { + // Not kSendSsrc, assume correct RTX packet. Extract sequence number. + const uint8_t* rtx_header = packet + header.headerLength; + sequence_number = (rtx_header[0] << 8) + rtx_header[1]; + } + + if (sequence_number == nacked_sequence_number_) { + EXPECT_EQ(retransmit_ssrc_, header.ssrc); + EXPECT_EQ(retransmit_payload_type_, header.payloadType); + observation_complete_->Set(); + } + + return SEND_PACKET; + } + + private: + internal::TransportAdapter transport_adapter_; + int send_count_; + uint32_t retransmit_ssrc_; + uint8_t retransmit_payload_type_; + int nacked_sequence_number_; + } observer(retransmit_ssrc, retransmit_payload_type); + + Call::Config call_config(observer.SendTransport()); + scoped_ptr call(Call::Create(call_config)); + observer.SetReceivers(call->Receiver(), NULL); + + VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1); + send_config.rtp.nack.rtp_history_ms = 1000; + send_config.rtp.rtx.payload_type = retransmit_payload_type; + send_config.pacing = enable_pacing; + if (retransmit_ssrc != kSendSsrc) + send_config.rtp.rtx.ssrcs.push_back(retransmit_ssrc); + + RunSendTest(call.get(), send_config, &observer); +} + +TEST_F(VideoSendStreamTest, RetransmitsNack) { + // Normal NACKs should use the send SSRC. + TestNackRetransmission(kSendSsrc, kFakeSendPayloadType, false); +} + +TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) { + // NACKs over RTX should use a separate SSRC. + TestNackRetransmission(kSendRtxSsrc, kSendRtxPayloadType, false); +} + +TEST_F(VideoSendStreamTest, RetransmitsNackOverRtxWithPacing) { + // NACKs over RTX should use a separate SSRC. + TestNackRetransmission(kSendRtxSsrc, kSendRtxPayloadType, true); +} + +void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, + bool with_fec) { + static const int kRedPayloadType = 118; + static const int kUlpfecPayloadType = 119; + // Observer that verifies that the expected number of packets and bytes + // arrive for each frame size, from start_size to stop_size. + class FrameFragmentationObserver : public test::RtpRtcpObserver, + public EncodedFrameObserver { + public: + FrameFragmentationObserver(uint32_t max_packet_size, + uint32_t start_size, + uint32_t stop_size, + test::ConfigurableFrameSizeEncoder* encoder, + bool test_generic_packetization, + bool use_fec) + : RtpRtcpObserver(120 * 1000), // Timeout after two minutes. + transport_adapter_(SendTransport()), + encoder_(encoder), + max_packet_size_(max_packet_size), + stop_size_(stop_size), + test_generic_packetization_(test_generic_packetization), + use_fec_(use_fec), + packet_count_(0), + accumulated_size_(0), + accumulated_payload_(0), + fec_packet_received_(false), + current_size_rtp_(start_size), + current_size_frame_(start_size) { + // Fragmentation required, this test doesn't make sense without it. 
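+    // Frames must be able to grow beyond the maximum packet size, or the
+    // packetizer would never need to fragment anything.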
+      assert(stop_size > max_packet_size);
+      transport_adapter_.Enable();
+    }
+
+    virtual Action OnSendRtp(const uint8_t* packet, size_t size) OVERRIDE {
+      uint32_t length = static_cast<uint32_t>(size);
+      RTPHeader header;
+      EXPECT_TRUE(parser_->Parse(packet, length, &header));
+
+      EXPECT_LE(length, max_packet_size_);
+
+      if (use_fec_) {
+        uint8_t payload_type = packet[header.headerLength];
+        bool is_fec = header.payloadType == kRedPayloadType &&
+                      payload_type == kUlpfecPayloadType;
+        if (is_fec) {
+          fec_packet_received_ = true;
+          return SEND_PACKET;
+        }
+      }
+
+      accumulated_size_ += length;
+
+      if (use_fec_)
+        TriggerLossReport(header);
+
+      if (test_generic_packetization_) {
+        uint32_t overhead = header.headerLength + header.paddingLength +
+                            (1 /* Generic header */);
+        if (use_fec_)
+          overhead += 1;  // RED for FEC header.
+        accumulated_payload_ += length - overhead;
+      }
+
+      // Marker bit set indicates last packet of a frame.
+      if (header.markerBit) {
+        if (use_fec_ && accumulated_payload_ == current_size_rtp_ - 1) {
+          // With FEC enabled, frame size is incremented asynchronously, so
+          // "old" frames one byte too small may arrive. Accept, but don't
+          // increase expected frame size.
+          accumulated_size_ = 0;
+          accumulated_payload_ = 0;
+          return SEND_PACKET;
+        }
+
+        EXPECT_GE(accumulated_size_, current_size_rtp_);
+        if (test_generic_packetization_) {
+          EXPECT_EQ(current_size_rtp_, accumulated_payload_);
+        }
+
+        // Last packet of frame; reset counters.
+        accumulated_size_ = 0;
+        accumulated_payload_ = 0;
+        if (current_size_rtp_ == stop_size_) {
+          // Done! (Don't increase size again, might arrive more @ stop_size).
+          observation_complete_->Set();
+        } else {
+          // Increase next expected frame size. If testing with FEC, make sure
+          // a FEC packet has been received for this frame size before
+          // proceeding, to make sure that redundancy packets don't exceed
+          // size limit.
+          if (!use_fec_) {
+            ++current_size_rtp_;
+          } else if (fec_packet_received_) {
+            fec_packet_received_ = false;
+            ++current_size_rtp_;
+            ++current_size_frame_;
+          }
+        }
+      }
+
+      return SEND_PACKET;
+    }
+
+    void TriggerLossReport(const RTPHeader& header) {
+      // Send lossy receive reports to trigger FEC enabling.
+      if (packet_count_++ % 2 != 0) {
+        // Receive statistics reporting having lost 50% of the packets.
+        FakeReceiveStatistics lossy_receive_stats(
+            kSendSsrc, header.sequenceNumber, packet_count_ / 2, 127);
+        RTCPSender rtcp_sender(
+            0, false, Clock::GetRealTimeClock(), &lossy_receive_stats);
+        EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+        rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+        rtcp_sender.SetRemoteSSRC(kSendSsrc);
+
+        RTCPSender::FeedbackState feedback_state;
+
+        EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+      }
+    }
+
+    virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) {
+      // Increase frame size for next encoded frame, in the context of the
+      // encoder thread.
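+      // current_size_frame_ is an Atomic32 since this callback increments it
+      // on the encoder thread while OnSendRtp() above may also increment it
+      // from the transport thread when FEC is enabled.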
+      if (!use_fec_ &&
+          current_size_frame_.Value() < static_cast<int32_t>(stop_size_)) {
+        ++current_size_frame_;
+      }
+      encoder_->SetFrameSize(current_size_frame_.Value());
+    }
+
+   private:
+    internal::TransportAdapter transport_adapter_;
+    test::ConfigurableFrameSizeEncoder* const encoder_;
+
+    const uint32_t max_packet_size_;
+    const uint32_t stop_size_;
+    const bool test_generic_packetization_;
+    const bool use_fec_;
+
+    uint32_t packet_count_;
+    uint32_t accumulated_size_;
+    uint32_t accumulated_payload_;
+    bool fec_packet_received_;
+
+    uint32_t current_size_rtp_;
+    Atomic32 current_size_frame_;
+  };
+
+  // Use a fake encoder to output a frame of every size in the range [90, 290],
+  // for each size making sure that the exact number of payload bytes received
+  // is correct and that packets are fragmented to respect max packet size.
+  static const uint32_t kMaxPacketSize = 128;
+  static const uint32_t start = 90;
+  static const uint32_t stop = 290;
+
+  // Don't auto increment if FEC is used; continue sending frame size until
+  // a FEC packet has been received.
+  test::ConfigurableFrameSizeEncoder encoder(stop);
+  encoder.SetFrameSize(start);
+
+  FrameFragmentationObserver observer(
+      kMaxPacketSize, start, stop, &encoder, format == kGeneric, with_fec);
+  Call::Config call_config(observer.SendTransport());
+  scoped_ptr<Call> call(Call::Create(call_config));
+
+  observer.SetReceivers(call->Receiver(), NULL);
+
+  VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
+  if (with_fec) {
+    send_config.rtp.fec.red_payload_type = kRedPayloadType;
+    send_config.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+  }
+
+  if (format == kVP8) {
+    strcpy(send_config.codec.plName, "VP8");
+    send_config.codec.codecType = kVideoCodecVP8;
+  }
+  send_config.pacing = false;
+  send_config.encoder = &encoder;
+  send_config.rtp.max_packet_size = kMaxPacketSize;
+  send_config.post_encode_callback = &observer;
+
+  // Add an extension header, to make the RTP header larger than the base
+  // length of 12 bytes.
+  static const uint8_t kAbsSendTimeExtensionId = 13;
+  send_config.rtp.extensions.push_back(
+      RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
+
+  RunSendTest(call.get(), send_config, &observer);
+}
+
+// TODO(sprang): Is there any way of speeding up these tests?
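+// Rough worked example of the sizes involved, assuming the base 12-byte RTP
+// header plus the one-element abs-send-time extension configured above
+// (about 20 header bytes per packet): each packet then carries roughly
+// 128 - 20 = 108 payload bytes, so a 290-byte frame needs
+// ceil(290 / 108) = 3 packets, each within the 128-byte limit the observer
+// asserts.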
+TEST_F(VideoSendStreamTest, FragmentsGenericAccordingToMaxPacketSize) {
+  TestPacketFragmentationSize(kGeneric, false);
+}
+
+TEST_F(VideoSendStreamTest, FragmentsGenericAccordingToMaxPacketSizeWithFec) {
+  TestPacketFragmentationSize(kGeneric, true);
+}
+
+TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSize) {
+  TestPacketFragmentationSize(kVP8, false);
+}
+
+TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSizeWithFec) {
+  TestPacketFragmentationSize(kVP8, true);
+}
+
+TEST_F(VideoSendStreamTest, CanChangeSendCodec) {
+  static const uint8_t kFirstPayloadType = 121;
+  static const uint8_t kSecondPayloadType = 122;
+
+  class CodecChangeObserver : public test::RtpRtcpObserver {
+   public:
+    CodecChangeObserver(VideoSendStream** send_stream_ptr)
+        : RtpRtcpObserver(30 * 1000),
+          received_first_payload_(EventWrapper::Create()),
+          send_stream_ptr_(send_stream_ptr) {}
+
+    virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+      RTPHeader header;
+      EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+
+      if (header.payloadType == kFirstPayloadType) {
+        received_first_payload_->Set();
+      } else if (header.payloadType == kSecondPayloadType) {
+        observation_complete_->Set();
+      }
+
+      return SEND_PACKET;
+    }
+
+    virtual EventTypeWrapper Wait() OVERRIDE {
+      EXPECT_EQ(kEventSignaled, received_first_payload_->Wait(30 * 1000))
+          << "Timed out while waiting for first payload.";
+
+      EXPECT_TRUE((*send_stream_ptr_)->SetCodec(second_codec_));
+
+      EXPECT_EQ(kEventSignaled, RtpRtcpObserver::Wait())
+          << "Timed out while waiting for second payload type.";
+
+      // Return OK regardless, prevents double error reporting.
+      return kEventSignaled;
+    }
+
+    void SetSecondCodec(const VideoCodec& codec) { second_codec_ = codec; }
+
+   private:
+    scoped_ptr<EventWrapper> received_first_payload_;
+    VideoSendStream** send_stream_ptr_;
+    VideoCodec second_codec_;
+  } observer(&send_stream_);
+
+  Call::Config call_config(observer.SendTransport());
+  scoped_ptr<Call> call(Call::Create(call_config));
+
+  std::vector<VideoCodec> codecs = call->GetVideoCodecs();
+  ASSERT_GE(codecs.size(), 2u)
+      << "Test needs at least 2 separate codecs to work.";
+  codecs[0].plType = kFirstPayloadType;
+  codecs[1].plType = kSecondPayloadType;
+  observer.SetSecondCodec(codecs[1]);
+
+  VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
+  send_config.codec = codecs[0];
+  send_config.encoder = NULL;
+
+  RunSendTest(call.get(), send_config, &observer);
+}
+
+// The test will go through a number of phases.
+// 1. Start sending packets.
+// 2. As soon as the RTP stream has been detected, signal a low REMB value to
+//    suspend the stream.
+// 3. Wait until |kSuspendTimeFrames| have been captured without seeing any RTP
+//    packets.
+// 4. Signal a high REMB and then wait for the RTP stream to start again.
+//    When the stream is detected again, the test ends.
+TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
+  static const int kSuspendTimeFrames = 60;  // Suspend for 2 seconds @ 30 fps.
+
+  class RembObserver : public test::RtpRtcpObserver, public I420FrameCallback {
+   public:
+    RembObserver()
+        : RtpRtcpObserver(30 * 1000),  // Timeout after 30 seconds.
+          transport_adapter_(&transport_),
+          clock_(Clock::GetRealTimeClock()),
+          test_state_(kBeforeSuspend),
+          rtp_count_(0),
+          last_sequence_number_(0),
+          suspended_frame_count_(0),
+          low_remb_bps_(0),
+          high_remb_bps_(0),
+          crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {
+      transport_adapter_.Enable();
+    }
+
+    void SetReceiver(PacketReceiver* receiver) {
+      transport_.SetReceiver(receiver);
+    }
+
+    virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+      // Receive statistics reporting having lost 0% of the packets.
+      // This is needed for the send-side bitrate controller to work properly.
+      CriticalSectionScoped lock(crit_sect_.get());
+      SendRtcpFeedback(0);  // REMB is only sent if value is > 0.
+      return SEND_PACKET;
+    }
+
+    virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+      CriticalSectionScoped lock(crit_sect_.get());
+      ++rtp_count_;
+      RTPHeader header;
+      EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+      last_sequence_number_ = header.sequenceNumber;
+
+      if (test_state_ == kBeforeSuspend) {
+        // The stream has started. Try to suspend it.
+        SendRtcpFeedback(low_remb_bps_);
+        test_state_ = kDuringSuspend;
+      } else if (test_state_ == kDuringSuspend) {
+        if (header.paddingLength == 0) {
+          // Received non-padding packet during suspension period. Reset the
+          // counter.
+          // TODO(hlundin): We should probably make this test more advanced in
+          // the future, so that it verifies that the bitrate can go below the
+          // min_bitrate. This requires that the fake encoder sees the
+          // min_bitrate, and never goes below it. See WebRTC Issue 2655.
+          suspended_frame_count_ = 0;
+        }
+      } else if (test_state_ == kWaitingForPacket) {
+        if (header.paddingLength == 0) {
+          // Non-padding packet observed. Test is complete.
+          observation_complete_->Set();
+        }
+      }
+
+      return SEND_PACKET;
+    }
+
+    // This method implements the I420FrameCallback.
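+    // State walk, matching the numbered phases above the test: OnSendRtp()
+    // moves kBeforeSuspend -> kDuringSuspend by sending the low REMB; the
+    // frame callback below moves kDuringSuspend -> kWaitingForPacket once
+    // kSuspendTimeFrames frames pass without media; OnSendRtp() then ends the
+    // test on the next non-padding packet. (kAfterSuspend is declared but not
+    // reached here.)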
+    void FrameCallback(I420VideoFrame* video_frame) OVERRIDE {
+      CriticalSectionScoped lock(crit_sect_.get());
+      if (test_state_ == kDuringSuspend &&
+          ++suspended_frame_count_ > kSuspendTimeFrames) {
+        SendRtcpFeedback(high_remb_bps_);
+        test_state_ = kWaitingForPacket;
+      }
+    }
+
+    void set_low_remb_bps(int value) { low_remb_bps_ = value; }
+
+    void set_high_remb_bps(int value) { high_remb_bps_ = value; }
+
+    void Stop() { transport_.StopSending(); }
+
+   private:
+    enum TestState {
+      kBeforeSuspend,
+      kDuringSuspend,
+      kWaitingForPacket,
+      kAfterSuspend
+    };
+
+    virtual void SendRtcpFeedback(int remb_value) {
+      FakeReceiveStatistics receive_stats(
+          kSendSsrc, last_sequence_number_, rtp_count_, 0);
+      RTCPSender rtcp_sender(0, false, clock_, &receive_stats);
+      EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+      rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+      rtcp_sender.SetRemoteSSRC(kSendSsrc);
+      if (remb_value > 0) {
+        rtcp_sender.SetREMBStatus(true);
+        rtcp_sender.SetREMBData(remb_value, 0, NULL);
+      }
+      RTCPSender::FeedbackState feedback_state;
+      EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+    }
+
+    internal::TransportAdapter transport_adapter_;
+    test::DirectTransport transport_;
+    Clock* clock_;
+    TestState test_state_;
+    int rtp_count_;
+    int last_sequence_number_;
+    int suspended_frame_count_;
+    int low_remb_bps_;
+    int high_remb_bps_;
+    scoped_ptr<CriticalSectionWrapper> crit_sect_;
+  } observer;
+
+  Call::Config call_config(observer.SendTransport());
+  scoped_ptr<Call> call(Call::Create(call_config));
+  observer.SetReceiver(call->Receiver());
+
+  VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
+  send_config.rtp.nack.rtp_history_ms = 1000;
+  send_config.pre_encode_callback = &observer;
+  send_config.suspend_below_min_bitrate = true;
+  unsigned int min_bitrate_bps =
+      send_config.codec.simulcastStream[0].minBitrate * 1000;
+  observer.set_low_remb_bps(min_bitrate_bps - 10000);
+  unsigned int threshold_window = std::max(min_bitrate_bps / 10, 10000u);
+  ASSERT_GT(send_config.codec.simulcastStream[0].maxBitrate * 1000,
+            min_bitrate_bps + threshold_window + 5000);
+  observer.set_high_remb_bps(min_bitrate_bps + threshold_window + 5000);
+
+  RunSendTest(call.get(), send_config, &observer);
+  observer.Stop();
+}
+
+TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
+  class PacketObserver : public test::RtpRtcpObserver {
+   public:
+    PacketObserver()
+        : RtpRtcpObserver(30 * 1000),  // Timeout after 30 seconds.
+          clock_(Clock::GetRealTimeClock()),
+          last_packet_time_ms_(-1),
+          transport_adapter_(ReceiveTransport()),
+          capturer_(NULL),
+          crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {
+      transport_adapter_.Enable();
+    }
+
+    void SetCapturer(test::FrameGeneratorCapturer* capturer) {
+      capturer_ = capturer;
+    }
+
+    virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+      CriticalSectionScoped lock(crit_sect_.get());
+      last_packet_time_ms_ = clock_->TimeInMilliseconds();
+      capturer_->Stop();
+      return SEND_PACKET;
+    }
+
+    virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+      CriticalSectionScoped lock(crit_sect_.get());
+      const int kVideoMutedThresholdMs = 10000;
+      if (last_packet_time_ms_ > 0 &&
+          clock_->TimeInMilliseconds() - last_packet_time_ms_ >
+              kVideoMutedThresholdMs)
+        observation_complete_->Set();
+      // Receive statistics reporting having lost 50% of the packets.
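+      // (The periodic reports sent below keep the sender's RTCP loop fed;
+      // the point of the test is that even so, no padding packets may appear
+      // once the capturer has been stopped.)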
+      FakeReceiveStatistics receive_stats(kSendSsrcs[0], 1, 1, 0);
+      RTCPSender rtcp_sender(
+          0, false, Clock::GetRealTimeClock(), &receive_stats);
+      EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+      rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+      rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+
+      RTCPSender::FeedbackState feedback_state;
+
+      EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+      return SEND_PACKET;
+    }
+
+   private:
+    Clock* clock_;
+    int64_t last_packet_time_ms_;
+    internal::TransportAdapter transport_adapter_;
+    test::FrameGeneratorCapturer* capturer_;
+    scoped_ptr<CriticalSectionWrapper> crit_sect_;
+  } observer;
+
+  Call::Config call_config(observer.SendTransport());
+  scoped_ptr<Call> call(Call::Create(call_config));
+  observer.SetReceivers(call->Receiver(), call->Receiver());
+
+  VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 3);
+
+  send_stream_ = call->CreateVideoSendStream(send_config);
+  scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+      test::FrameGeneratorCapturer::Create(
+          send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+  observer.SetCapturer(frame_generator_capturer.get());
+  send_stream_->StartSending();
+  frame_generator_capturer->Start();
+
+  EXPECT_EQ(kEventSignaled, observer.Wait())
+      << "Timed out while waiting for RTP packets to stop being sent.";
+
+  observer.StopSending();
+  frame_generator_capturer->Stop();
+  send_stream_->StopSending();
+  call->DestroyVideoSendStream(send_stream_);
+}
+
+TEST_F(VideoSendStreamTest, ProducesStats) {
+  static std::string kCName = "PjQatC14dGfbVwGPUOA9IH7RlsFDbWl4AhXEiDsBizo=";
+  class StatsObserver : public test::RtpRtcpObserver {
+   public:
+    StatsObserver() : RtpRtcpObserver(30 * 1000), stream_(NULL) {}
+
+    virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+      VideoSendStream::Stats stats = stream_->GetStats();
+      // Check that all applicable data sources have been used.
+      if (stats.input_frame_rate > 0 && stats.encode_frame_rate > 0 &&
+          stats.avg_delay_ms > 0 && stats.c_name == kCName &&
+          !stats.substreams.empty()) {
+        uint32_t ssrc = stats.substreams.begin()->first;
+        EXPECT_NE(
+            config_.rtp.ssrcs.end(),
+            std::find(
+                config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end(), ssrc));
+        // Check for data populated by various sources. RTCP excluded as this
+        // data is received from remote side. Tested in call tests instead.
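+        // Roughly: input_frame_rate comes from the capturer,
+        // encode_frame_rate from the encoder, and the per-substream
+        // key_frames/bitrate_bps/rtp_stats checked below from the RTP sender,
+        // keyed by one of the configured SSRCs.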
+        StreamStats& entry = stats.substreams[ssrc];
+        if (entry.key_frames > 0u && entry.bitrate_bps > 0 &&
+            entry.rtp_stats.packets > 0u) {
+          observation_complete_->Set();
+        }
+      }
+
+      return SEND_PACKET;
+    }
+
+    void SetConfig(const VideoSendStream::Config& config) { config_ = config; }
+
+    void SetSendStream(VideoSendStream* stream) { stream_ = stream; }
+
+    VideoSendStream* stream_;
+    VideoSendStream::Config config_;
+  } observer;
+
+  Call::Config call_config(observer.SendTransport());
+  scoped_ptr<Call> call(Call::Create(call_config));
+
+  VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
+  send_config.rtp.c_name = kCName;
+  observer.SetConfig(send_config);
+
+  send_stream_ = call->CreateVideoSendStream(send_config);
+  observer.SetSendStream(send_stream_);
+  scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+      test::FrameGeneratorCapturer::Create(
+          send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+  send_stream_->StartSending();
+  frame_generator_capturer->Start();
+
+  EXPECT_EQ(kEventSignaled, observer.Wait());
+
+  observer.StopSending();
+  frame_generator_capturer->Stop();
+  send_stream_->StopSending();
+  call->DestroyVideoSendStream(send_stream_);
+}
+
+}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video/webrtc_video.gypi b/media/webrtc/trunk/webrtc/video/webrtc_video.gypi
new file mode 100644
index 000000000000..4de970abdeb0
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/video/webrtc_video.gypi
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{ + 'variables': { + 'webrtc_video_dependencies': [ + '<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core', + ], + 'webrtc_video_sources': [ + 'video/call.cc', + 'video/encoded_frame_callback_adapter.cc', + 'video/encoded_frame_callback_adapter.h', + 'video/send_statistics_proxy.cc', + 'video/send_statistics_proxy.h', + 'video/receive_statistics_proxy.cc', + 'video/receive_statistics_proxy.h', + 'video/transport_adapter.cc', + 'video/transport_adapter.h', + 'video/video_receive_stream.cc', + 'video/video_receive_stream.h', + 'video/video_send_stream.cc', + 'video/video_send_stream.h', + ], + }, +} diff --git a/media/webrtc/trunk/webrtc/video_engine/Android.mk b/media/webrtc/trunk/webrtc/video_engine/Android.mk index b0a0d63ff284..74866cc2eb70 100644 --- a/media/webrtc/trunk/webrtc/video_engine/Android.mk +++ b/media/webrtc/trunk/webrtc/video_engine/Android.mk @@ -20,7 +20,6 @@ LOCAL_SRC_FILES := \ vie_base_impl.cc \ vie_capture_impl.cc \ vie_codec_impl.cc \ - vie_encryption_impl.cc \ vie_external_codec_impl.cc \ vie_file_impl.cc \ vie_image_process_impl.cc \ @@ -61,7 +60,6 @@ LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/../common_video/vplib/main/interface \ $(LOCAL_PATH)/../modules/interface \ $(LOCAL_PATH)/../modules/audio_coding/main/interface \ - $(LOCAL_PATH)/../modules/media_file/interface \ $(LOCAL_PATH)/../modules/rtp_rtcp/interface \ $(LOCAL_PATH)/../modules/udp_transport/interface \ $(LOCAL_PATH)/../modules/utility/interface \ diff --git a/media/webrtc/trunk/webrtc/video_engine/call_stats.cc b/media/webrtc/trunk/webrtc/video_engine/call_stats.cc index 0184e5b32e1b..a79a7d2fb510 100644 --- a/media/webrtc/trunk/webrtc/video_engine/call_stats.cc +++ b/media/webrtc/trunk/webrtc/video_engine/call_stats.cc @@ -24,7 +24,7 @@ const int kRttTimeoutMs = 1500; // Time interval for updating the observers. 
 const int kUpdateIntervalMs = 1000;
 
-class RtcpObserver : public RtcpRttObserver {
+class RtcpObserver : public RtcpRttStats {
  public:
   explicit RtcpObserver(CallStats* owner) : owner_(owner) {}
   virtual ~RtcpObserver() {}
@@ -33,6 +33,10 @@ class RtcpObserver : public RtcpRttObserver {
     owner_->OnRttUpdate(rtt);
   }
 
+  virtual uint32_t LastProcessedRtt() const {
+    return owner_->last_processed_rtt_ms();
+  }
+
  private:
   CallStats* owner_;
 
@@ -41,8 +45,9 @@ class RtcpObserver : public RtcpRttObserver {
 
 CallStats::CallStats()
     : crit_(CriticalSectionWrapper::CreateCriticalSection()),
-      rtcp_rtt_observer_(new RtcpObserver(this)),
-      last_process_time_(TickTime::MillisecondTimestamp()) {
+      rtcp_rtt_stats_(new RtcpObserver(this)),
+      last_process_time_(TickTime::MillisecondTimestamp()),
+      last_processed_rtt_ms_(0) {
 }
 
 CallStats::~CallStats() {
@@ -81,12 +86,18 @@ int32_t CallStats::Process() {
       (*it)->OnRttUpdate(max_rtt);
     }
   }
+  last_processed_rtt_ms_ = max_rtt;
   last_process_time_ = time_now;
   return 0;
 }
 
-RtcpRttObserver* CallStats::rtcp_rtt_observer() const {
-  return rtcp_rtt_observer_.get();
+uint32_t CallStats::last_processed_rtt_ms() const {
+  CriticalSectionScoped cs(crit_.get());
+  return last_processed_rtt_ms_;
+}
+
+RtcpRttStats* CallStats::rtcp_rtt_stats() const {
+  return rtcp_rtt_stats_.get();
 }
 
 void CallStats::RegisterStatsObserver(CallStatsObserver* observer) {
diff --git a/media/webrtc/trunk/webrtc/video_engine/call_stats.h b/media/webrtc/trunk/webrtc/video_engine/call_stats.h
index 5fd93a762418..5c021a484ea3 100644
--- a/media/webrtc/trunk/webrtc/video_engine/call_stats.h
+++ b/media/webrtc/trunk/webrtc/video_engine/call_stats.h
@@ -19,9 +19,9 @@
 
 namespace webrtc {
 
-class CriticalSectionWrapper;
-class RtcpRttObserver;
 class CallStatsObserver;
+class CriticalSectionWrapper;
+class RtcpRttStats;
 
 // CallStats keeps track of statistics for a call.
 class CallStats : public Module {
@@ -35,9 +35,9 @@ class CallStats : public Module {
   virtual int32_t TimeUntilNextProcess();
   virtual int32_t Process();
 
-  // Returns a RtcpRttObserver to register at a statistics provider. The object
+  // Returns a RtcpRttStats to register at a statistics provider. The object
   // has the same lifetime as the CallStats instance.
-  RtcpRttObserver* rtcp_rtt_observer() const;
+  RtcpRttStats* rtcp_rtt_stats() const;
 
   // Registers/deregisters a new observer to receive statistics updates.
   void RegisterStatsObserver(CallStatsObserver* observer);
@@ -46,6 +46,8 @@ class CallStats : public Module {
  protected:
   void OnRttUpdate(uint32_t rtt);
 
+  uint32_t last_processed_rtt_ms() const;
+
 private:
   // Helper struct keeping track of the time a rtt value is reported.
   struct RttTime {
@@ -58,9 +60,11 @@ class CallStats : public Module {
   // Protecting all members.
   scoped_ptr<CriticalSectionWrapper> crit_;
   // Observer receiving statistics updates.
-  scoped_ptr<RtcpRttObserver> rtcp_rtt_observer_;
+  scoped_ptr<RtcpRttStats> rtcp_rtt_stats_;
   // The last time 'Process' resulted in statistic update.
   int64_t last_process_time_;
+  // The last RTT in the statistics update (zero if there is no valid estimate).
+  uint32_t last_processed_rtt_ms_;
 
   // All Rtt reports within valid time interval, oldest first.
   std::list<RttTime> reports_;
diff --git a/media/webrtc/trunk/webrtc/video_engine/call_stats_unittest.cc b/media/webrtc/trunk/webrtc/video_engine/call_stats_unittest.cc
index e5b23611b5fb..f504094ba27b 100644
--- a/media/webrtc/trunk/webrtc/video_engine/call_stats_unittest.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/call_stats_unittest.cc
@@ -41,15 +41,24 @@ class CallStatsTest : public ::testing::Test {
 
 TEST_F(CallStatsTest, AddAndTriggerCallback) {
   MockStatsObserver stats_observer;
-  RtcpRttObserver* rtcp_observer = call_stats_->rtcp_rtt_observer();
+  RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
   call_stats_->RegisterStatsObserver(&stats_observer);
   TickTime::AdvanceFakeClock(1000);
+  EXPECT_EQ(0U, rtcp_rtt_stats->LastProcessedRtt());
 
-  uint32_t rtt = 25;
-  rtcp_observer->OnRttUpdate(rtt);
-  EXPECT_CALL(stats_observer, OnRttUpdate(rtt))
+  const uint32_t kRtt = 25;
+  rtcp_rtt_stats->OnRttUpdate(kRtt);
+  EXPECT_CALL(stats_observer, OnRttUpdate(kRtt))
       .Times(1);
   call_stats_->Process();
+  EXPECT_EQ(kRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+  const int kRttTimeOutMs = 1500 + 10;
+  TickTime::AdvanceFakeClock(kRttTimeOutMs);
+  EXPECT_CALL(stats_observer, OnRttUpdate(_))
+      .Times(0);
+  call_stats_->Process();
+  EXPECT_EQ(0U, rtcp_rtt_stats->LastProcessedRtt());
 
   call_stats_->DeregisterStatsObserver(&stats_observer);
 }
@@ -57,8 +66,8 @@ TEST_F(CallStatsTest, AddAndTriggerCallback) {
 TEST_F(CallStatsTest, ProcessTime) {
   MockStatsObserver stats_observer;
   call_stats_->RegisterStatsObserver(&stats_observer);
-  RtcpRttObserver* rtcp_observer = call_stats_->rtcp_rtt_observer();
-  rtcp_observer->OnRttUpdate(100);
+  RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+  rtcp_rtt_stats->OnRttUpdate(100);
 
   // Time isn't updated yet.
   EXPECT_CALL(stats_observer, OnRttUpdate(_))
@@ -73,7 +82,7 @@ TEST_F(CallStatsTest, ProcessTime) {
 
   // Advance clock just too little to get an update.
   TickTime::AdvanceFakeClock(999);
-  rtcp_observer->OnRttUpdate(100);
+  rtcp_rtt_stats->OnRttUpdate(100);
   EXPECT_CALL(stats_observer, OnRttUpdate(_))
       .Times(0);
   call_stats_->Process();
@@ -98,9 +107,9 @@ TEST_F(CallStatsTest, MultipleObservers) {
   call_stats_->RegisterStatsObserver(&stats_observer_2);
   call_stats_->RegisterStatsObserver(&stats_observer_2);
 
-  RtcpRttObserver* rtcp_observer = call_stats_->rtcp_rtt_observer();
+  RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
   uint32_t rtt = 100;
-  rtcp_observer->OnRttUpdate(rtt);
+  rtcp_rtt_stats->OnRttUpdate(rtt);
 
   // Verify both observers are updated.
   TickTime::AdvanceFakeClock(1000);
@@ -113,7 +122,7 @@ TEST_F(CallStatsTest, MultipleObservers) {
   // Deregister the second observer and verify update is only sent to the first
   // observer.
   call_stats_->DeregisterStatsObserver(&stats_observer_2);
-  rtcp_observer->OnRttUpdate(rtt);
+  rtcp_rtt_stats->OnRttUpdate(rtt);
   TickTime::AdvanceFakeClock(1000);
   EXPECT_CALL(stats_observer_1, OnRttUpdate(rtt))
       .Times(1);
@@ -123,7 +132,7 @@ TEST_F(CallStatsTest, MultipleObservers) {
 
   // Deregister the first observer.
   call_stats_->DeregisterStatsObserver(&stats_observer_1);
-  rtcp_observer->OnRttUpdate(rtt);
+  rtcp_rtt_stats->OnRttUpdate(rtt);
   TickTime::AdvanceFakeClock(1000);
   EXPECT_CALL(stats_observer_1, OnRttUpdate(rtt))
       .Times(0);
@@ -136,14 +145,14 @@ TEST_F(CallStatsTest, MultipleObservers) {
 TEST_F(CallStatsTest, ChangeRtt) {
   MockStatsObserver stats_observer;
   call_stats_->RegisterStatsObserver(&stats_observer);
-  RtcpRttObserver* rtcp_observer = call_stats_->rtcp_rtt_observer();
+  RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
 
   // Advance clock to be ready for an update.
   TickTime::AdvanceFakeClock(1000);
 
   // Set a first value and verify the callback is triggered.
   const uint32_t first_rtt = 100;
-  rtcp_observer->OnRttUpdate(first_rtt);
+  rtcp_rtt_stats->OnRttUpdate(first_rtt);
   EXPECT_CALL(stats_observer, OnRttUpdate(first_rtt))
       .Times(1);
   call_stats_->Process();
@@ -151,7 +160,7 @@ TEST_F(CallStatsTest, ChangeRtt) {
   // Increase rtt and verify the new value is reported.
   TickTime::AdvanceFakeClock(1000);
   const uint32_t high_rtt = first_rtt + 20;
-  rtcp_observer->OnRttUpdate(high_rtt);
+  rtcp_rtt_stats->OnRttUpdate(high_rtt);
   EXPECT_CALL(stats_observer, OnRttUpdate(high_rtt))
       .Times(1);
   call_stats_->Process();
@@ -161,13 +170,13 @@ TEST_F(CallStatsTest, ChangeRtt) {
   // in the callback.
   TickTime::AdvanceFakeClock(1000);
   const uint32_t low_rtt = first_rtt - 20;
-  rtcp_observer->OnRttUpdate(low_rtt);
+  rtcp_rtt_stats->OnRttUpdate(low_rtt);
   EXPECT_CALL(stats_observer, OnRttUpdate(high_rtt))
       .Times(1);
   call_stats_->Process();
 
-  // Advance time to make the high report invalid, the lower rtt should no be in
-  // the callback.
+  // Advance time to make the high report invalid, the lower rtt should now be
+  // in the callback.
   TickTime::AdvanceFakeClock(1000);
   EXPECT_CALL(stats_observer, OnRttUpdate(low_rtt))
       .Times(1);
diff --git a/media/webrtc/trunk/webrtc/video_engine/encoder_state_feedback_unittest.cc b/media/webrtc/trunk/webrtc/video_engine/encoder_state_feedback_unittest.cc
index 7f3316591239..f85d98979188 100644
--- a/media/webrtc/trunk/webrtc/video_engine/encoder_state_feedback_unittest.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/encoder_state_feedback_unittest.cc
@@ -30,7 +30,7 @@ class TestProcessThread : public ProcessThread {
   ~TestProcessThread() {}
   virtual int32_t Start() { return 0; }
   virtual int32_t Stop() { return 0; }
-  virtual int32_t RegisterModule(const Module* module) { return 0; }
+  virtual int32_t RegisterModule(Module* module) { return 0; }
   virtual int32_t DeRegisterModule(const Module* module) { return 0; }
 };
 
diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h
index 094319eea24a..c3952fb83705 100644
--- a/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h
+++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h
@@ -21,6 +21,10 @@
 
 #include "webrtc/common_types.h"
 
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(MOZ_WIDGET_GONK)
+#include <jni.h>
+#endif
+
 namespace webrtc {
 
 class Config;
@@ -60,10 +64,10 @@ class WEBRTC_DLLEXPORT VideoEngine {
   // user receives callbacks for generated trace messages.
   static int SetTraceCallback(TraceCallback* callback);
 
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(MOZ_WIDGET_GONK)
   // Android specific.
-  // Provides VideoEngine with pointers to objects supplied by the Java
-  // applications JNI interface.
- static int SetAndroidObjects(void* java_vm, void* java_context); + static int SetAndroidObjects(JavaVM* java_vm); +#endif protected: VideoEngine() {} @@ -115,6 +119,25 @@ class WEBRTC_DLLEXPORT ViEBase { virtual int RegisterCpuOveruseObserver(int channel, CpuOveruseObserver* observer) = 0; + // Gets cpu overuse measures. + // capture_jitter_ms: The current estimated jitter in ms based on incoming + // captured frames. + // avg_encode_time_ms: The average encode time in ms. + // encode_usage_percent: The average encode time divided by the average time + // difference between incoming captured frames. + // capture_queue_delay_ms_per_s: The current time delay between an incoming + // captured frame until the frame is being + // processed. The delay is expressed in ms + // delay per second. + // TODO(asapersson): Remove default implementation. + virtual int CpuOveruseMeasures(int channel, + int* capture_jitter_ms, + int* avg_encode_time_ms, + int* encode_usage_percent, + int* capture_queue_delay_ms_per_s) { + return -1; + } + // Changing the current state of the host CPU. Encoding engines // can adapt their behavior if needed. (Optional) virtual void SetLoadManager(CPULoadStateCallbackInvoker* load_manager) = 0; diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_capture.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_capture.h index 2174d5dc6f2e..cee362651012 100644 --- a/media/webrtc/trunk/webrtc/video_engine/include/vie_capture.h +++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_capture.h @@ -19,6 +19,7 @@ #define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_ #include "webrtc/common_types.h" +#include "webrtc/common_video/interface/i420_video_frame.h" namespace webrtc { @@ -117,6 +118,8 @@ class WEBRTC_DLLEXPORT ViEExternalCapture { virtual int IncomingFrameI420( const ViEVideoFrameI420& video_frame, unsigned long long capture_time = 0) = 0; + + virtual void SwapFrame(I420VideoFrame* frame) {} }; // This class declares an abstract interface for a user defined observer. It is diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_codec.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_codec.h index 47bc6861cbc6..f658aaa24564 100644 --- a/media/webrtc/trunk/webrtc/video_engine/include/vie_codec.h +++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_codec.h @@ -35,6 +35,11 @@ class WEBRTC_DLLEXPORT ViEEncoderObserver { virtual void OutgoingRate(const int video_channel, const unsigned int framerate, const unsigned int bitrate) = 0; + + // This method is called whenever the state of the SuspendBelowMinBitrate + // changes, i.e., when |is_suspended| toggles. + virtual void SuspendChange(int video_channel, bool is_suspended) = 0; + protected: virtual ~ViEEncoderObserver() {} }; @@ -56,6 +61,16 @@ class WEBRTC_DLLEXPORT ViEDecoderObserver { const unsigned int framerate, const unsigned int bitrate) = 0; + // Called periodically with decoder timing information. All values are + // "current" snapshots unless decorated with a min_/max_ prefix. + virtual void DecoderTiming(int decode_ms, + int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms) = 0; + // This method is called when the decoder needs a new key frame from encoder // on the sender. virtual void RequestNewKeyFrame(const int video_channel) = 0; @@ -177,6 +192,16 @@ class WEBRTC_DLLEXPORT ViECodec { // Disables recording of debugging information. 
   virtual int StopDebugRecording(int video_channel) = 0;
 
+  // Lets the sender suspend video when the rate drops below
+  // |threshold_bps|, and turns back on when the rate goes back up above
+  // |threshold_bps| + |window_bps|.
+  // This is under development; not tested.
+  virtual void SuspendBelowMinBitrate(int video_channel) = 0;
+
+  // TODO(holmer): Remove this default implementation when possible.
+  virtual bool GetSendSideDelay(int video_channel, int* avg_delay_ms,
+                                int* max_delay_ms) const { return false; }
+
  protected:
   ViECodec() {}
   virtual ~ViECodec() {}
diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_encryption.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_encryption.h
deleted file mode 100644
index 6cf721f8f44c..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/include/vie_encryption.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-// This sub-API supports the following functionalities:
-//  - External encryption and decryption.
-
-#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
-#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
-
-#include "webrtc/common_types.h"
-
-namespace webrtc {
-class VideoEngine;
-
-class WEBRTC_DLLEXPORT ViEEncryption {
- public:
-  // Factory for the ViEEncryption sub-API and increases an internal reference
-  // counter if successful. Returns NULL if the API is not supported or if
-  // construction fails.
-  static ViEEncryption* GetInterface(VideoEngine* video_engine);
-
-  // Releases the ViEEncryption sub-API and decreases an internal reference
-  // counter.
-  // Returns the new reference count. This value should be zero
-  // for all sub-API:s before the VideoEngine object can be safely deleted.
-  virtual int Release() = 0;
-
-  // This function registers a encryption derived instance and enables
-  // external encryption for the specified channel.
-  virtual int RegisterExternalEncryption(const int video_channel,
-                                         Encryption& encryption) = 0;
-
-  // This function deregisters a registered encryption derived instance
-  // and disables external encryption.
-  virtual int DeregisterExternalEncryption(const int video_channel) = 0;
-
- protected:
-  ViEEncryption() {}
-  virtual ~ViEEncryption() {}
-};
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_errors.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_errors.h
index 35af194939cd..1e9be1d49add 100644
--- a/media/webrtc/trunk/webrtc/video_engine/include/vie_errors.h
+++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_errors.h
@@ -98,12 +98,6 @@ enum ViEErrors {
   kViERtpRtcpObserverNotRegistered,    // No observer registered.
   kViERtpRtcpUnknownError,             // An unknown error has occurred. Check the log file.
 
-  // ViEEncryption.
-  kViEEncryptionInvalidChannelId = 12700,  // Channel id does not exist.
-  kViEEncryptionInvalidSrtpParameter,      // DEPRECATED
-  kViEEncryptionSrtpNotSupported,          // DEPRECATED
-  kViEEncryptionUnknownError,              // An unknown error has occurred. Check the log file.
-
   // ViEImageProcess.
kViEImageProcessInvalidChannelId = 12800, // No Channel exist with the provided channel id. kViEImageProcessInvalidCaptureId, // No capture device exist with the provided capture id. diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_image_process.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_image_process.h index cb66bb161c42..aff2d613fd5c 100644 --- a/media/webrtc/trunk/webrtc/video_engine/include/vie_image_process.h +++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_image_process.h @@ -18,10 +18,11 @@ #define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_ #include "webrtc/common_types.h" -#include "webrtc/common_video/interface/i420_video_frame.h" namespace webrtc { +class EncodedImageCallback; +class I420FrameCallback; class VideoEngine; // This class declares an abstract interface for a user defined effect filter. @@ -90,6 +91,27 @@ class WEBRTC_DLLEXPORT ViEImageProcess { virtual int EnableColorEnhancement(const int video_channel, const bool enable) = 0; + // New-style callbacks, used by VideoSendStream/VideoReceiveStream. + virtual void RegisterPreEncodeCallback( + int video_channel, + I420FrameCallback* pre_encode_callback) = 0; + virtual void DeRegisterPreEncodeCallback(int video_channel) = 0; + + virtual void RegisterPostEncodeImageCallback( + int video_channel, + EncodedImageCallback* post_encode_callback) {} + virtual void DeRegisterPostEncodeCallback(int video_channel) {} + + virtual void RegisterPreDecodeImageCallback( + int video_channel, + EncodedImageCallback* pre_decode_callback) {} + virtual void DeRegisterPreDecodeCallback(int video_channel) {} + + virtual void RegisterPreRenderCallback( + int video_channel, + I420FrameCallback* pre_render_callback) = 0; + virtual void DeRegisterPreRenderCallback(int video_channel) = 0; + protected: ViEImageProcess() {} virtual ~ViEImageProcess() {} diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_network.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_network.h index e1c6bb2c5025..4a9e6ce203f4 100644 --- a/media/webrtc/trunk/webrtc/video_engine/include/vie_network.h +++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_network.h @@ -65,7 +65,8 @@ class WEBRTC_DLLEXPORT ViENetwork { // the RTP header and payload. virtual int ReceivedRTPPacket(const int video_channel, const void* data, - const int length) = 0; + const int length, + const PacketTime& packet_time) = 0; // When using external transport for a channel, received RTCP packets should // be passed to VideoEngine using this function. diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_render.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_render.h index 48afc1adbbd4..ab61d7aca735 100644 --- a/media/webrtc/trunk/webrtc/video_engine/include/vie_render.h +++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_render.h @@ -22,10 +22,11 @@ namespace webrtc { class VideoEngine; class VideoRender; +class VideoRenderCallback; // This class declares an abstract interface to be used for external renderers. // The user implemented derived class is registered using AddRenderer(). -class WEBRTC_DLLEXPORT ExternalRenderer { +class ExternalRenderer { public: // This method will be called when the stream to be rendered changes in // resolution or number of streams mixed in the image. 
@@ -51,7 +52,7 @@ class WEBRTC_DLLEXPORT ExternalRenderer {
   virtual ~ExternalRenderer() {}
 };
 
-class WEBRTC_DLLEXPORT ViERender {
+class ViERender {
  public:
   // Factory for the ViERender sub-API and increases an internal reference
   // counter if successful. Returns NULL if the API is not supported or if
@@ -111,6 +112,13 @@ class ViERender {
                                RawVideoType video_input_format,
                                ExternalRenderer* renderer) = 0;
 
+  // Propagating VideoRenderCallback down to the VideoRender module for new API.
+  // Contains default-implementation not to break code mocking this interface.
+  // (Ugly, but temporary.)
+  virtual int AddRenderCallback(int render_id, VideoRenderCallback* callback) {
+    return 0;
+  }
+
  protected:
   ViERender() {}
   virtual ~ViERender() {}
diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_rtp_rtcp.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_rtp_rtcp.h
index 474cd9987e00..802b081705f6 100644
--- a/media/webrtc/trunk/webrtc/video_engine/include/vie_rtp_rtcp.h
+++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_rtp_rtcp.h
@@ -27,6 +27,7 @@
 namespace webrtc {
 
 class VideoEngine;
+struct ReceiveBandwidthEstimatorStats;
 
 // This enumerator sets the RTCP mode.
 enum ViERTCPMode {
@@ -266,6 +267,11 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
                                              bool enable,
                                              int id) = 0;
 
+  // Enables/disables RTCP Receiver Reference Time Report Block extension/
+  // DLRR Report Block extension (RFC 3611).
+  // TODO(asapersson): Remove default implementation.
+  virtual int SetRtcpXrRrtrStatus(int video_channel, bool enable) { return -1; }
+
   // Enables transmission smoothening, i.e. packets belonging to the same frame
   // will be sent over a longer period of time instead of sending them
   // back-to-back.
@@ -274,29 +280,97 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
 
   // This function returns our locally created statistics of the received RTP
   // stream.
-  virtual int GetReceivedRTCPStatistics(
-      const int video_channel,
-      unsigned short& fraction_lost,
-      unsigned int& cumulative_lost,
-      unsigned int& extended_max,
-      unsigned int& jitter,
-      int& rtt_ms) const = 0;
+  virtual int GetReceiveChannelRtcpStatistics(const int video_channel,
+                                              RtcpStatistics& basic_stats,
+                                              int& rtt_ms) const = 0;
 
   // This function returns statistics reported by the remote client in a RTCP
   // packet.
+ virtual int GetSendChannelRtcpStatistics(const int video_channel, + RtcpStatistics& basic_stats, + int& rtt_ms) const = 0; + + // TODO(sprang): Temporary hacks to prevent libjingle build from failing, + // remove when libjingle has been lifted to support webrtc issue 2589 + virtual int GetReceivedRTCPStatistics(const int video_channel, + unsigned short& fraction_lost, + unsigned int& cumulative_lost, + unsigned int& extended_max, + unsigned int& jitter, + int& rtt_ms) const { + RtcpStatistics stats; + int ret_code = GetReceiveChannelRtcpStatistics(video_channel, + stats, + rtt_ms); + fraction_lost = stats.fraction_lost; + cumulative_lost = stats.cumulative_lost; + extended_max = stats.extended_max_sequence_number; + jitter = stats.jitter; + return ret_code; + } virtual int GetSentRTCPStatistics(const int video_channel, - unsigned short& fraction_lost, - unsigned int& cumulative_lost, - unsigned int& extended_max, - unsigned int& jitter, - int& rtt_ms) const = 0; + unsigned short& fraction_lost, + unsigned int& cumulative_lost, + unsigned int& extended_max, + unsigned int& jitter, + int& rtt_ms) const { + RtcpStatistics stats; + int ret_code = GetSendChannelRtcpStatistics(video_channel, + stats, + rtt_ms); + fraction_lost = stats.fraction_lost; + cumulative_lost = stats.cumulative_lost; + extended_max = stats.extended_max_sequence_number; + jitter = stats.jitter; + return ret_code; + } + + + virtual int RegisterSendChannelRtcpStatisticsCallback( + int video_channel, RtcpStatisticsCallback* callback) = 0; + + virtual int DeregisterSendChannelRtcpStatisticsCallback( + int video_channel, RtcpStatisticsCallback* callback) = 0; + + virtual int RegisterReceiveChannelRtcpStatisticsCallback( + int video_channel, RtcpStatisticsCallback* callback) = 0; + + virtual int DeregisterReceiveChannelRtcpStatisticsCallback( + int video_channel, RtcpStatisticsCallback* callback) = 0; // The function gets statistics from the sent and received RTP streams. 
+ virtual int GetRtpStatistics(const int video_channel, + StreamDataCounters& sent, + StreamDataCounters& received) const = 0; + + // TODO(sprang): Temporary hacks to prevent libjingle build from failing, + // remove when libjingle has been lifted to support webrtc issue 2589 virtual int GetRTPStatistics(const int video_channel, - unsigned int& bytes_sent, - unsigned int& packets_sent, - unsigned int& bytes_received, - unsigned int& packets_received) const = 0; + unsigned int& bytes_sent, + unsigned int& packets_sent, + unsigned int& bytes_received, + unsigned int& packets_received) const { + StreamDataCounters sent; + StreamDataCounters received; + int ret_code = GetRtpStatistics(video_channel, sent, received); + bytes_sent = sent.bytes; + packets_sent = sent.packets; + bytes_received = received.bytes; + packets_received = received.packets; + return ret_code; + } + + virtual int RegisterSendChannelRtpStatisticsCallback( + int video_channel, StreamDataCountersCallback* callback) = 0; + + virtual int DeregisterSendChannelRtpStatisticsCallback( + int video_channel, StreamDataCountersCallback* callback) = 0; + + virtual int RegisterReceiveChannelRtpStatisticsCallback( + int video_channel, StreamDataCountersCallback* callback) = 0; + + virtual int DeregisterReceiveChannelRtpStatisticsCallback( + int video_channel, StreamDataCountersCallback* callback) = 0; // Gets the sender info part of the last received RTCP Sender Report (SR) virtual int GetRemoteRTCPSenderInfo(const int video_channel, @@ -310,6 +384,15 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP { unsigned int& fec_bitrate_sent, unsigned int& nackBitrateSent) const = 0; + // (De)Register an observer, called whenever the send bitrate is updated + virtual int RegisterSendBitrateObserver( + int video_channel, + BitrateStatisticsObserver* observer) = 0; + + virtual int DeregisterSendBitrateObserver( + int video_channel, + BitrateStatisticsObserver* observer) = 0; + // This function gets the send-side estimated bandwidth available for video, // including overhead, in bits/s. virtual int GetEstimatedSendBandwidth( @@ -323,6 +406,13 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP { const int video_channel, unsigned int* estimated_bandwidth) const = 0; + // This function gets the receive-side bandwidth esitmator statistics. + // TODO(jiayl): remove the default impl when libjingle's FakeWebRtcVideoEngine + // is updated. + virtual int GetReceiveBandwidthEstimatorStats( + const int video_channel, + ReceiveBandwidthEstimatorStats* output) const { return -1; } + // This function enables capturing of RTP packets to a binary file on a // specific channel and for a given direction. The file can later be // replayed using e.g. RTP Tools rtpplay since the binary file format is @@ -350,8 +440,15 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP { // Removes a registered instance of ViERTCPObserver. 
   virtual int DeregisterRTCPObserver(const int video_channel) = 0;
 
+  // Registers and instance of a user implementation of ViEFrameCountObserver
+  virtual int RegisterSendFrameCountObserver(
+      int video_channel, FrameCountObserver* observer) = 0;
+
+  // Removes a registered instance of a ViEFrameCountObserver
+  virtual int DeregisterSendFrameCountObserver(
+      int video_channel, FrameCountObserver* observer) = 0;
+
  protected:
-  ViERTP_RTCP() {}
   virtual ~ViERTP_RTCP() {}
 };
diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/call.cc b/media/webrtc/trunk/webrtc/video_engine/internal/call.cc
deleted file mode 100644
index 94c5b7ed4827..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/internal/call.cc
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/internal/call.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include <map>
-#include <vector>
-
-#include "webrtc/video_engine/include/vie_base.h"
-#include "webrtc/video_engine/include/vie_codec.h"
-#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
-#include "webrtc/video_engine/internal/video_receive_stream.h"
-#include "webrtc/video_engine/internal/video_send_stream.h"
-
-namespace webrtc {
-
-Call* Call::Create(const Call::Config& config) {
-  VideoEngine* video_engine = VideoEngine::Create();
-  assert(video_engine != NULL);
-
-  return new internal::Call(video_engine, config);
-}
-
-namespace internal {
-
-Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
-    : config_(config),
-      receive_lock_(RWLockWrapper::CreateRWLock()),
-      send_lock_(RWLockWrapper::CreateRWLock()),
-      rtp_header_parser_(RtpHeaderParser::Create()),
-      video_engine_(video_engine) {
-  assert(video_engine != NULL);
-  assert(config.send_transport != NULL);
-
-  rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine_);
-  assert(rtp_rtcp_ != NULL);
-
-  codec_ = ViECodec::GetInterface(video_engine_);
-  assert(codec_ != NULL);
-}
-
-Call::~Call() {
-  codec_->Release();
-  rtp_rtcp_->Release();
-  webrtc::VideoEngine::Delete(video_engine_);
-}
-
-PacketReceiver* Call::Receiver() { return this; }
-
-std::vector<VideoCodec> Call::GetVideoCodecs() {
-  std::vector<VideoCodec> codecs;
-
-  VideoCodec codec;
-  for (size_t i = 0; i < static_cast<size_t>(codec_->NumberOfCodecs()); ++i) {
-    if (codec_->GetCodec(i, codec) == 0) {
-      codecs.push_back(codec);
-    }
-  }
-  return codecs;
-}
-
-VideoSendStream::Config Call::GetDefaultSendConfig() {
-  VideoSendStream::Config config;
-  codec_->GetCodec(0, config.codec);
-  return config;
-}
-
-VideoSendStream* Call::CreateSendStream(const VideoSendStream::Config& config) {
-  assert(config.rtp.ssrcs.size() > 0);
-  assert(config.codec.numberOfSimulcastStreams == 0 ||
-         config.codec.numberOfSimulcastStreams == config.rtp.ssrcs.size());
-
-  VideoSendStream* send_stream = new VideoSendStream(
-      config_.send_transport, config_.overuse_detection, video_engine_, config);
-
-  WriteLockScoped write_lock(*send_lock_);
-  for (size_t i = 0; i < config.rtp.ssrcs.size(); ++i) {
-    assert(send_ssrcs_.find(config.rtp.ssrcs[i]) == send_ssrcs_.end());
-    send_ssrcs_[config.rtp.ssrcs[i]] = send_stream;
-  }
-  return send_stream;
-}
-
-SendStreamState*
-Call::DestroySendStream(webrtc::VideoSendStream* send_stream) {
-  assert(send_stream != NULL);
-
-  VideoSendStream* send_stream_impl = NULL;
-  {
-    WriteLockScoped write_lock(*send_lock_);
-    for (std::map<uint32_t, VideoSendStream*>::iterator it =
-             send_ssrcs_.begin();
-         it != send_ssrcs_.end();
-         ++it) {
-      if (it->second == static_cast<VideoSendStream*>(send_stream)) {
-        send_stream_impl = it->second;
-        send_ssrcs_.erase(it);
-        break;
-      }
-    }
-  }
-
-  assert(send_stream_impl != NULL);
-  delete send_stream_impl;
-
-  // TODO(pbos): Return its previous state
-  return NULL;
-}
-
-VideoReceiveStream::Config Call::GetDefaultReceiveConfig() {
-  return VideoReceiveStream::Config();
-}
-
-VideoReceiveStream* Call::CreateReceiveStream(
-    const VideoReceiveStream::Config& config) {
-  VideoReceiveStream* receive_stream =
-      new VideoReceiveStream(video_engine_, config, config_.send_transport);
-
-  WriteLockScoped write_lock(*receive_lock_);
-  assert(receive_ssrcs_.find(config.rtp.ssrc) == receive_ssrcs_.end());
-  receive_ssrcs_[config.rtp.ssrc] = receive_stream;
-  return receive_stream;
-}
-
-void Call::DestroyReceiveStream(webrtc::VideoReceiveStream* receive_stream) {
-  assert(receive_stream != NULL);
-
-  VideoReceiveStream* receive_stream_impl = NULL;
-  {
-    WriteLockScoped write_lock(*receive_lock_);
-    for (std::map<uint32_t, VideoReceiveStream*>::iterator it =
-             receive_ssrcs_.begin();
-         it != receive_ssrcs_.end();
-         ++it) {
-      if (it->second == static_cast<VideoReceiveStream*>(receive_stream)) {
-        receive_stream_impl = it->second;
-        receive_ssrcs_.erase(it);
-        break;
-      }
-    }
-  }
-
-  assert(receive_stream_impl != NULL);
-  delete receive_stream_impl;
-}
-
-uint32_t Call::SendBitrateEstimate() {
-  // TODO(pbos): Return send-bitrate estimate
-  return 0;
-}
-
-uint32_t Call::ReceiveBitrateEstimate() {
-  // TODO(pbos): Return receive-bitrate estimate
-  return 0;
-}
-
-bool Call::DeliverRtcp(const uint8_t* packet, size_t length) {
-  // TODO(pbos): Figure out what channel needs it actually.
-  //             Do NOT broadcast! Also make sure it's a valid packet.
-  bool rtcp_delivered = false;
-  {
-    ReadLockScoped read_lock(*receive_lock_);
-    for (std::map<uint32_t, VideoReceiveStream*>::iterator it =
-             receive_ssrcs_.begin();
-         it != receive_ssrcs_.end();
-         ++it) {
-      if (it->second->DeliverRtcp(packet, length))
-        rtcp_delivered = true;
-    }
-  }
-
-  {
-    ReadLockScoped read_lock(*send_lock_);
-    for (std::map<uint32_t, VideoSendStream*>::iterator it =
-             send_ssrcs_.begin();
-         it != send_ssrcs_.end();
-         ++it) {
-      if (it->second->DeliverRtcp(packet, length))
-        rtcp_delivered = true;
-    }
-  }
-  return rtcp_delivered;
-}
-
-bool Call::DeliverRtp(const RTPHeader& header,
-                      const uint8_t* packet,
-                      size_t length) {
-  VideoReceiveStream* receiver;
-  {
-    ReadLockScoped read_lock(*receive_lock_);
-    std::map<uint32_t, VideoReceiveStream*>::iterator it =
-        receive_ssrcs_.find(header.ssrc);
-    if (it == receive_ssrcs_.end()) {
-      // TODO(pbos): Log some warning, SSRC without receiver.
-      return false;
-    }
-
-    receiver = it->second;
-  }
-  return receiver->DeliverRtp(static_cast<const uint8_t*>(packet), length);
-}
-
-bool Call::DeliverPacket(const uint8_t* packet, size_t length) {
-  // TODO(pbos): ExtensionMap if there are extensions.
- if (RtpHeaderParser::IsRtcp(packet, static_cast(length))) - return DeliverRtcp(packet, length); - - RTPHeader rtp_header; - if (!rtp_header_parser_->Parse(packet, static_cast(length), &rtp_header)) - return false; - - return DeliverRtp(rtp_header, packet, length); -} - -} // namespace internal -} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/call.h b/media/webrtc/trunk/webrtc/video_engine/internal/call.h deleted file mode 100644 index 7891bfd88581..000000000000 --- a/media/webrtc/trunk/webrtc/video_engine/internal/call.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef WEBRTC_VIDEO_ENGINE_INTERNAL_CALL_H_ -#define WEBRTC_VIDEO_ENGINE_INTERNAL_CALL_H_ - -#include -#include - -#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" -#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h" -#include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "webrtc/video_engine/internal/video_receive_stream.h" -#include "webrtc/video_engine/internal/video_send_stream.h" -#include "webrtc/video_engine/new_include/call.h" - -namespace webrtc { - -class VideoEngine; -class ViERTP_RTCP; -class ViECodec; - -namespace internal { - -// TODO(pbos): Split out the packet receiver, should be sharable between -// VideoEngine and VoiceEngine. -class Call : public webrtc::Call, public PacketReceiver { - public: - Call(webrtc::VideoEngine* video_engine, const Call::Config& config); - virtual ~Call(); - - virtual PacketReceiver* Receiver() OVERRIDE; - virtual std::vector GetVideoCodecs() OVERRIDE; - - virtual VideoSendStream::Config GetDefaultSendConfig() OVERRIDE; - - virtual VideoSendStream* CreateSendStream( - const VideoSendStream::Config& config) OVERRIDE; - - virtual SendStreamState* DestroySendStream( - webrtc::VideoSendStream* send_stream) OVERRIDE; - - virtual VideoReceiveStream::Config GetDefaultReceiveConfig() OVERRIDE; - - virtual VideoReceiveStream* CreateReceiveStream( - const VideoReceiveStream::Config& config) OVERRIDE; - - virtual void DestroyReceiveStream( - webrtc::VideoReceiveStream* receive_stream) OVERRIDE; - - virtual uint32_t SendBitrateEstimate() OVERRIDE; - virtual uint32_t ReceiveBitrateEstimate() OVERRIDE; - - virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE; - - private: - bool DeliverRtcp(const uint8_t* packet, size_t length); - bool DeliverRtp(const RTPHeader& header, - const uint8_t* packet, - size_t length); - - Call::Config config_; - - std::map receive_ssrcs_; - scoped_ptr receive_lock_; - - std::map send_ssrcs_; - scoped_ptr send_lock_; - - scoped_ptr rtp_header_parser_; - - webrtc::VideoEngine* video_engine_; - ViERTP_RTCP* rtp_rtcp_; - ViECodec* codec_; - - DISALLOW_COPY_AND_ASSIGN(Call); -}; -} // namespace internal -} // namespace webrtc - -#endif // WEBRTC_VIDEO_ENGINE_INTERNAL_CALL_H_ diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/video_receive_stream.cc b/media/webrtc/trunk/webrtc/video_engine/internal/video_receive_stream.cc deleted file mode 100644 index 07f120cb09b0..000000000000 --- a/media/webrtc/trunk/webrtc/video_engine/internal/video_receive_stream.cc +++ /dev/null @@ 
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/internal/video_receive_stream.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/video_engine/include/vie_base.h"
-#include "webrtc/video_engine/include/vie_capture.h"
-#include "webrtc/video_engine/include/vie_codec.h"
-#include "webrtc/video_engine/include/vie_external_codec.h"
-#include "webrtc/video_engine/include/vie_network.h"
-#include "webrtc/video_engine/include/vie_render.h"
-#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
-#include "webrtc/video_engine/new_include/video_receive_stream.h"
-
-namespace webrtc {
-namespace internal {
-
-VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
-                                       const VideoReceiveStream::Config& config,
-                                       newapi::Transport* transport)
-    : transport_adapter_(transport), config_(config), channel_(-1) {
-  video_engine_base_ = ViEBase::GetInterface(video_engine);
-  // TODO(mflodman): Use the other CreateChannel method.
-  video_engine_base_->CreateChannel(channel_);
-  assert(channel_ != -1);
-
-  rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
-  assert(rtp_rtcp_ != NULL);
-
-  // TODO(pbos): This is not fine grained enough...
-  rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0);
-  rtp_rtcp_->SetKeyFrameRequestMethod(channel_, kViEKeyFrameRequestPliRtcp);
-
-  assert(config_.rtp.ssrc != 0);
-
-  network_ = ViENetwork::GetInterface(video_engine);
-  assert(network_ != NULL);
-
-  network_->RegisterSendTransport(channel_, transport_adapter_);
-
-  codec_ = ViECodec::GetInterface(video_engine);
-
-  for (size_t i = 0; i < config_.codecs.size(); ++i) {
-    if (codec_->SetReceiveCodec(channel_, config_.codecs[i]) != 0) {
-      // TODO(pbos): Abort gracefully, this can be a runtime error.
-      //             Factor out to an Init() method.
-      abort();
-    }
-  }
-
-  external_codec_ = ViEExternalCodec::GetInterface(video_engine);
-  for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
-    ExternalVideoDecoder* decoder = &config_.external_decoders[i];
-    if (external_codec_->RegisterExternalReceiveCodec(
-            channel_,
-            decoder->payload_type,
-            decoder->decoder,
-            decoder->renderer,
-            decoder->expected_delay_ms) !=
-        0) {
-      // TODO(pbos): Abort gracefully? Can this be a runtime error?
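
Both abort() TODOs in this constructor point at the same limitation: a C++
constructor cannot report failure, so fallible setup belongs in a separate,
checkable Init() step. A minimal sketch of the two-phase construction pattern
the TODO suggests; the names here are illustrative and not part of the deleted
file:

  // Construction only stores state and cannot fail; fallible work is
  // deferred to Init(), which reports errors instead of calling abort().
  class ReceiveStream {
   public:
    explicit ReceiveStream(int channel) : channel_(channel) {}

    bool Init() {
      if (channel_ < 0)
        return false;  // A runtime error the caller can handle.
      return true;
    }

   private:
    int channel_;
  };

  // Usage: construct, then refuse to proceed unless Init() succeeds.
  //   ReceiveStream stream(0);
  //   if (!stream.Init()) { /* surface the error to the caller */ }
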
- abort(); - } - } - - render_ = webrtc::ViERender::GetInterface(video_engine); - assert(render_ != NULL); - - if (render_->AddRenderer(channel_, kVideoI420, this) != 0) { - abort(); - } - - clock_ = Clock::GetRealTimeClock(); -} - -VideoReceiveStream::~VideoReceiveStream() { - for (size_t i = 0; i < config_.external_decoders.size(); ++i) { - external_codec_->DeRegisterExternalReceiveCodec( - channel_, config_.external_decoders[i].payload_type); - } - - network_->DeregisterSendTransport(channel_); - - video_engine_base_->Release(); - external_codec_->Release(); - codec_->Release(); - network_->Release(); - render_->Release(); - rtp_rtcp_->Release(); -} - -void VideoReceiveStream::StartReceive() { - if (render_->StartRender(channel_)) { - abort(); - } - if (video_engine_base_->StartReceive(channel_) != 0) { - abort(); - } -} - -void VideoReceiveStream::StopReceive() { - if (render_->StopRender(channel_)) { - abort(); - } - if (video_engine_base_->StopReceive(channel_) != 0) { - abort(); - } -} - -void VideoReceiveStream::GetCurrentReceiveCodec(VideoCodec* receive_codec) { - // TODO(pbos): Implement -} - -bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { - return network_->ReceivedRTCPPacket(channel_, packet, length) == 0; -} - -bool VideoReceiveStream::DeliverRtp(const uint8_t* packet, size_t length) { - return network_->ReceivedRTPPacket(channel_, packet, length) == 0; -} - -int VideoReceiveStream::FrameSizeChange(unsigned int width, - unsigned int height, - unsigned int /*number_of_streams*/) { - width_ = width; - height_ = height; - return 0; -} - -int VideoReceiveStream::DeliverFrame(uint8_t* frame, - int buffer_size, - uint32_t timestamp, - int64_t render_time, - void* /*handle*/) { - if (config_.renderer == NULL) { - return 0; - } - - I420VideoFrame video_frame; - video_frame.CreateEmptyFrame(width_, height_, width_, height_, height_); - ConvertToI420(kI420, - frame, - 0, - 0, - width_, - height_, - buffer_size, - webrtc::kRotateNone, - &video_frame); - video_frame.set_timestamp(timestamp); - video_frame.set_render_time_ms(render_time); - - if (config_.post_decode_callback != NULL) { - config_.post_decode_callback->FrameCallback(&video_frame); - } - - if (config_.renderer != NULL) { - // TODO(pbos): Add timing to RenderFrame call - config_.renderer->RenderFrame(video_frame, - render_time - clock_->TimeInMilliseconds()); - } - - return 0; -} - -bool VideoReceiveStream::IsTextureSupported() { return false; } - -} // internal -} // webrtc diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/video_send_stream.cc b/media/webrtc/trunk/webrtc/video_engine/internal/video_send_stream.cc deleted file mode 100644 index 34cafc87b674..000000000000 --- a/media/webrtc/trunk/webrtc/video_engine/internal/video_send_stream.cc +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */
-
-#include "webrtc/video_engine/internal/video_send_stream.h"
-
-#include <string.h>
-
-#include <vector>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/video_engine/include/vie_base.h"
-#include "webrtc/video_engine/include/vie_capture.h"
-#include "webrtc/video_engine/include/vie_codec.h"
-#include "webrtc/video_engine/include/vie_external_codec.h"
-#include "webrtc/video_engine/include/vie_network.h"
-#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
-#include "webrtc/video_engine/new_include/video_send_stream.h"
-
-namespace webrtc {
-namespace internal {
-
-// Super simple and temporary overuse logic. This will move to the application
-// as soon as the new API allows changing send codec on the fly.
-class ResolutionAdaptor : public webrtc::CpuOveruseObserver {
- public:
-  ResolutionAdaptor(ViECodec* codec, int channel, size_t width, size_t height)
-      : codec_(codec),
-        channel_(channel),
-        max_width_(width),
-        max_height_(height) {}
-
-  virtual ~ResolutionAdaptor() {}
-
-  virtual void OveruseDetected() OVERRIDE {
-    VideoCodec codec;
-    if (codec_->GetSendCodec(channel_, codec) != 0)
-      return;
-
-    if (codec.width / 2 < min_width || codec.height / 2 < min_height)
-      return;
-
-    codec.width /= 2;
-    codec.height /= 2;
-    codec_->SetSendCodec(channel_, codec);
-  }
-
-  virtual void NormalUsage() OVERRIDE {
-    VideoCodec codec;
-    if (codec_->GetSendCodec(channel_, codec) != 0)
-      return;
-
-    if (codec.width * 2u > max_width_ || codec.height * 2u > max_height_)
-      return;
-
-    codec.width *= 2;
-    codec.height *= 2;
-    codec_->SetSendCodec(channel_, codec);
-  }
-
- private:
-  // Temporary and arbitrary chosen minimum resolution.
-  static const size_t min_width = 160;
-  static const size_t min_height = 120;
-
-  ViECodec* codec_;
-  const int channel_;
-
-  const size_t max_width_;
-  const size_t max_height_;
-};
-
-VideoSendStream::VideoSendStream(newapi::Transport* transport,
-                                 bool overuse_detection,
-                                 webrtc::VideoEngine* video_engine,
-                                 const VideoSendStream::Config& config)
-    : transport_adapter_(transport), config_(config), external_codec_(NULL) {
-
-  if (config_.codec.numberOfSimulcastStreams > 0) {
-    assert(config_.rtp.ssrcs.size() == config_.codec.numberOfSimulcastStreams);
-  } else {
-    assert(config_.rtp.ssrcs.size() == 1);
-  }
-
-  video_engine_base_ = ViEBase::GetInterface(video_engine);
-  video_engine_base_->CreateChannel(channel_);
-  assert(channel_ != -1);
-
-  rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
-  assert(rtp_rtcp_ != NULL);
-
-  if (config_.rtp.ssrcs.size() == 1) {
-    rtp_rtcp_->SetLocalSSRC(channel_, config_.rtp.ssrcs[0]);
-  } else {
-    for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
-      rtp_rtcp_->SetLocalSSRC(channel_, config_.rtp.ssrcs[i],
-                              kViEStreamTypeNormal, i);
-    }
-  }
-  rtp_rtcp_->SetTransmissionSmoothingStatus(channel_, config_.pacing);
-  if (!config_.rtp.rtx.ssrcs.empty()) {
-    assert(config_.rtp.rtx.ssrcs.size() == config_.rtp.ssrcs.size());
-    for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
-      rtp_rtcp_->SetLocalSSRC(
-          channel_, config_.rtp.rtx.ssrcs[i], kViEStreamTypeRtx, i);
-    }
-
-    if (config_.rtp.rtx.rtx_payload_type != 0) {
-      rtp_rtcp_->SetRtxSendPayloadType(channel_,
-                                       config_.rtp.rtx.rtx_payload_type);
-    }
-  }
-
-  for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
-    const std::string& extension = config_.rtp.extensions[i].name;
-    int id = config_.rtp.extensions[i].id;
-    if (extension == "toffset") {
-      if (rtp_rtcp_->SetSendTimestampOffsetStatus(channel_, true, id) != 0)
-        abort();
-    } else if (extension == "abs-send-time") {
-      if (rtp_rtcp_->SetSendAbsoluteSendTimeStatus(channel_, true, id) != 0)
-        abort();
-    } else {
-      abort();  // Unsupported extension.
-    }
-  }
-
-  // Enable NACK, FEC or both.
-  if (config_.rtp.fec.red_payload_type != -1) {
-    assert(config_.rtp.fec.ulpfec_payload_type != -1);
-    if (config_.rtp.nack.rtp_history_ms > 0) {
-      rtp_rtcp_->SetHybridNACKFECStatus(
-          channel_,
-          true,
-          static_cast<unsigned char>(config_.rtp.fec.red_payload_type),
-          static_cast<unsigned char>(config_.rtp.fec.ulpfec_payload_type));
-    } else {
-      rtp_rtcp_->SetFECStatus(
-          channel_,
-          true,
-          static_cast<unsigned char>(config_.rtp.fec.red_payload_type),
-          static_cast<unsigned char>(config_.rtp.fec.ulpfec_payload_type));
-    }
-  } else {
-    rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0);
-  }
-
-  char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength];
-  assert(config_.rtp.c_name.length() < ViERTP_RTCP::KMaxRTCPCNameLength);
-  strncpy(rtcp_cname, config_.rtp.c_name.c_str(), sizeof(rtcp_cname) - 1);
-  rtcp_cname[sizeof(rtcp_cname) - 1] = '\0';
-
-  rtp_rtcp_->SetRTCPCName(channel_, rtcp_cname);
-
-  capture_ = ViECapture::GetInterface(video_engine);
-  capture_->AllocateExternalCaptureDevice(capture_id_, external_capture_);
-  capture_->ConnectCaptureDevice(capture_id_, channel_);
-
-  network_ = ViENetwork::GetInterface(video_engine);
-  assert(network_ != NULL);
-
-  network_->RegisterSendTransport(channel_, transport_adapter_);
-
-  if (config.encoder) {
-    external_codec_ = ViEExternalCodec::GetInterface(video_engine);
-    if (external_codec_->RegisterExternalSendCodec(
-            channel_, config.codec.plType, config.encoder,
-            config.internal_source) != 0) {
-      abort();
-    }
-  }
-
-  codec_ = ViECodec::GetInterface(video_engine);
-  if (codec_->SetSendCodec(channel_, config_.codec) != 0) {
-    abort();
-  }
-
-  if (overuse_detection) {
-    overuse_observer_.reset(
-        new ResolutionAdaptor(codec_, channel_, config_.codec.width,
-                              config_.codec.height));
-    video_engine_base_->RegisterCpuOveruseObserver(channel_,
-                                                   overuse_observer_.get());
-  }
-}
-
-VideoSendStream::~VideoSendStream() {
-  network_->DeregisterSendTransport(channel_);
-  video_engine_base_->DeleteChannel(channel_);
-
-  capture_->DisconnectCaptureDevice(channel_);
-  capture_->ReleaseCaptureDevice(capture_id_);
-
-  if (external_codec_) {
-    external_codec_->DeRegisterExternalSendCodec(channel_,
-                                                 config_.codec.plType);
-  }
-
-  video_engine_base_->Release();
-  capture_->Release();
-  codec_->Release();
-  if (external_codec_)
-    external_codec_->Release();
-  network_->Release();
-  rtp_rtcp_->Release();
-}
-
-void VideoSendStream::PutFrame(const I420VideoFrame& frame,
-                               uint32_t time_since_capture_ms) {
-  // TODO(pbos): frame_copy should happen after the VideoProcessingModule has
-  //             resized the frame.
-  I420VideoFrame frame_copy;
-  frame_copy.CopyFrame(frame);
-
-  if (config_.pre_encode_callback != NULL) {
-    config_.pre_encode_callback->FrameCallback(&frame_copy);
-  }
-
-  ViEVideoFrameI420 vf;
-
-  // TODO(pbos): This represents a memcpy step and is only required because
-  //             external_capture_ only takes ViEVideoFrameI420s.
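
For context on the plane pointers assigned just below: I420 stores a
full-resolution luma plane and two quarter-resolution chroma planes, so a
frame can be handed over as three pointers plus pitches, but repacking it into
another frame type costs a full copy at roughly 1.5 bytes per pixel. A small
self-contained sketch of the layout arithmetic (not taken from the deleted
file):

  #include <cstddef>

  // Byte sizes of the three I420 planes for a width x height frame.
  // Chroma is subsampled 2x2, so U and V are each a quarter of Y.
  struct I420Sizes {
    size_t y, u, v;
  };

  inline I420Sizes I420PlaneSizes(size_t width, size_t height) {
    const size_t y = width * height;
    const size_t chroma = ((width + 1) / 2) * ((height + 1) / 2);
    return I420Sizes{y, chroma, chroma};
  }
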
-  vf.y_plane = frame_copy.buffer(kYPlane);
-  vf.u_plane = frame_copy.buffer(kUPlane);
-  vf.v_plane = frame_copy.buffer(kVPlane);
-  vf.y_pitch = frame.stride(kYPlane);
-  vf.u_pitch = frame.stride(kUPlane);
-  vf.v_pitch = frame.stride(kVPlane);
-  vf.width = frame.width();
-  vf.height = frame.height();
-
-  external_capture_->IncomingFrameI420(vf, frame.render_time_ms());
-
-  if (config_.local_renderer != NULL) {
-    config_.local_renderer->RenderFrame(frame, 0);
-  }
-}
-
-VideoSendStreamInput* VideoSendStream::Input() { return this; }
-
-void VideoSendStream::StartSend() {
-  if (video_engine_base_->StartSend(channel_) != 0)
-    abort();
-  if (video_engine_base_->StartReceive(channel_) != 0)
-    abort();
-}
-
-void VideoSendStream::StopSend() {
-  if (video_engine_base_->StopSend(channel_) != 0)
-    abort();
-  if (video_engine_base_->StopReceive(channel_) != 0)
-    abort();
-}
-
-bool VideoSendStream::SetTargetBitrate(
-    int min_bitrate,
-    int max_bitrate,
-    const std::vector<SimulcastStream>& streams) {
-  return false;
-}
-
-void VideoSendStream::GetSendCodec(VideoCodec* send_codec) {
-  *send_codec = config_.codec;
-}
-
-bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
-  return network_->ReceivedRTCPPacket(
-             channel_, packet, static_cast<int>(length)) == 0;
-}
-
-}  // namespace internal
-}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video_engine/internal/video_send_stream.h b/media/webrtc/trunk/webrtc/video_engine/internal/video_send_stream.h
deleted file mode 100644
index 1241b483d45d..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/internal/video_send_stream.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIDEO_SEND_STREAM_IMPL_H_
-#define WEBRTC_VIDEO_ENGINE_VIDEO_SEND_STREAM_IMPL_H_
-
-#include <vector>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/video_engine/internal/transport_adapter.h"
-#include "webrtc/video_engine/new_include/video_receive_stream.h"
-#include "webrtc/video_engine/new_include/video_send_stream.h"
-
-namespace webrtc {
-
-class VideoEngine;
-class ViEBase;
-class ViECapture;
-class ViECodec;
-class ViEExternalCapture;
-class ViEExternalCodec;
-class ViENetwork;
-class ViERTP_RTCP;
-
-namespace internal {
-
-class ResolutionAdaptor;
-
-class VideoSendStream : public webrtc::VideoSendStream,
-                        public VideoSendStreamInput {
- public:
-  VideoSendStream(newapi::Transport* transport,
-                  bool overuse_detection,
-                  webrtc::VideoEngine* video_engine,
-                  const VideoSendStream::Config& config);
-
-  virtual ~VideoSendStream();
-
-  virtual void PutFrame(const I420VideoFrame& frame,
-                        uint32_t time_since_capture_ms) OVERRIDE;
-
-  virtual VideoSendStreamInput* Input() OVERRIDE;
-
-  virtual void StartSend() OVERRIDE;
-
-  virtual void StopSend() OVERRIDE;
-
-  virtual bool SetTargetBitrate(int min_bitrate, int max_bitrate,
-                                const std::vector<SimulcastStream>& streams)
-      OVERRIDE;
-
-  virtual void GetSendCodec(VideoCodec* send_codec) OVERRIDE;
-
- public:
-  bool DeliverRtcp(const uint8_t* packet, size_t length);
-
- private:
-  TransportAdapter transport_adapter_;
-  VideoSendStream::Config config_;
-
-  ViEBase* video_engine_base_;
-  ViECapture* capture_;
-  ViECodec* codec_;
-  ViEExternalCapture* external_capture_;
-  ViEExternalCodec* external_codec_;
-  ViENetwork* network_;
-  ViERTP_RTCP* rtp_rtcp_;
-
-  int channel_;
-  int capture_id_;
-  scoped_ptr<ResolutionAdaptor> overuse_observer_;
-};
-}  // namespace internal
-}  // namespace webrtc
-
-#endif  // WEBRTC_VIDEO_ENGINE_INTERNAL_VIDEO_SEND_STREAM_H_
diff --git a/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.cc b/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.cc
index 96cea176ff98..21aa7690b130 100644
--- a/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.cc
@@ -10,10 +10,12 @@
 
 #include "webrtc/video_engine/overuse_frame_detector.h"
 
-#include <assert.h>
 #include <math.h>
 #include <algorithm>
+#include <list>
+#include <map>
+
 #include "webrtc/modules/video_coding/utility/include/exp_filter.h"
 #include "webrtc/system_wrappers/interface/clock.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -27,11 +29,16 @@ namespace webrtc {
 namespace {
 const int64_t kProcessIntervalMs = 5000;
 
+// Number of initial process times before reporting.
+const int64_t kMinProcessCountBeforeReporting = 3;
+
+const int64_t kFrameTimeoutIntervalMs = 1500;
+
 // Consecutive checks above threshold to trigger overuse.
 const int kConsecutiveChecksAboveThreshold = 2;
 
 // Minimum samples required to perform a check.
-const size_t kMinFrameSampleCount = 15;
+const size_t kMinFrameSampleCount = 120;
 
 // Weight factor to apply to the standard deviation.
 const float kWeightFactor = 0.997f;
@@ -47,6 +54,13 @@ const int kMaxRampUpDelayMs = 120 * 1000;
 // Expontential back-off factor, to prevent annoying up-down behaviour.
 const double kRampUpBackoffFactor = 2.0;
 
+// The initial average encode time (set to a fairly small value).
+const float kInitialAvgEncodeTimeMs = 5.0f;
+
+// The maximum exponent to use in VCMExpFilter.
+const float kSampleDiffMs = 33.0f;
+const float kMaxExp = 7.0f;
+
 }  // namespace
 
 Statistics::Statistics() :
@@ -74,8 +88,8 @@ void Statistics::AddSample(float sample_ms) {
     return;
   }
 
-  float exp = sample_ms/33.0f;
-  exp = std::min(exp, 7.0f);
+  float exp = sample_ms / kSampleDiffMs;
+  exp = std::min(exp, kMaxExp);
   filtered_samples_->Apply(exp, sample_ms);
   filtered_variance_->Apply(exp,
       (sample_ms - filtered_samples_->Value()) *
          (sample_ms - filtered_samples_->Value()));
@@ -101,22 +115,152 @@ float Statistics::StdDev() const {
 uint64_t Statistics::Count() const {
   return count_;
 }
+
+// Class for calculating the average encode time.
+class OveruseFrameDetector::EncodeTimeAvg {
+ public:
+  EncodeTimeAvg()
+      : kWeightFactor(0.5f),
+        filtered_encode_time_ms_(new VCMExpFilter(kWeightFactor)) {
+    filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
+  }
+  ~EncodeTimeAvg() {}
+
+  void AddEncodeSample(float encode_time_ms, int64_t diff_last_sample_ms) {
+    float exp = diff_last_sample_ms / kSampleDiffMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_encode_time_ms_->Apply(exp, encode_time_ms);
+  }
+
+  int filtered_encode_time_ms() const {
+    return static_cast<int>(filtered_encode_time_ms_->Value() + 0.5);
+  }
+
+ private:
+  const float kWeightFactor;
+  scoped_ptr<VCMExpFilter> filtered_encode_time_ms_;
+};
+
+// Class for calculating the encode usage.
+class OveruseFrameDetector::EncodeUsage {
+ public:
+  EncodeUsage()
+      : kWeightFactorFrameDiff(0.998f),
+        kWeightFactorEncodeTime(0.995f),
+        filtered_encode_time_ms_(new VCMExpFilter(kWeightFactorEncodeTime)),
+        filtered_frame_diff_ms_(new VCMExpFilter(kWeightFactorFrameDiff)) {
+    filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
+    filtered_frame_diff_ms_->Apply(1.0f, kSampleDiffMs);
+  }
+  ~EncodeUsage() {}
+
+  void AddSample(float sample_ms) {
+    float exp = sample_ms / kSampleDiffMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_frame_diff_ms_->Apply(exp, sample_ms);
+  }
+
+  void AddEncodeSample(float encode_time_ms, int64_t diff_last_sample_ms) {
+    float exp = diff_last_sample_ms / kSampleDiffMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_encode_time_ms_->Apply(exp, encode_time_ms);
+  }
+
+  int UsageInPercent() const {
+    float frame_diff_ms = std::max(filtered_frame_diff_ms_->Value(), 1.0f);
+    float encode_usage_percent =
+        100.0f * filtered_encode_time_ms_->Value() / frame_diff_ms;
+    return static_cast<int>(encode_usage_percent + 0.5);
+  }
+
+ private:
+  const float kWeightFactorFrameDiff;
+  const float kWeightFactorEncodeTime;
+  scoped_ptr<VCMExpFilter> filtered_encode_time_ms_;
+  scoped_ptr<VCMExpFilter> filtered_frame_diff_ms_;
+};
+
+// Class for calculating the capture queue delay change.
+class OveruseFrameDetector::CaptureQueueDelay {
+ public:
+  CaptureQueueDelay()
+      : kWeightFactor(0.5f),
+        delay_ms_(0),
+        filtered_delay_ms_per_s_(new VCMExpFilter(kWeightFactor)) {
+    filtered_delay_ms_per_s_->Apply(1.0f, 0.0f);
+  }
+  ~CaptureQueueDelay() {}
+
+  void FrameCaptured(int64_t now) {
+    const size_t kMaxSize = 200;
+    if (frames_.size() > kMaxSize) {
+      frames_.pop_front();
+    }
+    frames_.push_back(now);
+  }
+
+  void FrameProcessingStarted(int64_t now) {
+    if (frames_.empty()) {
+      return;
+    }
+    delay_ms_ = now - frames_.front();
+    frames_.pop_front();
+  }
+
+  void CalculateDelayChange(int64_t diff_last_sample_ms) {
+    if (diff_last_sample_ms <= 0) {
+      return;
+    }
+    float exp = static_cast<float>(diff_last_sample_ms) / kProcessIntervalMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_delay_ms_per_s_->Apply(exp,
+                                    delay_ms_ * 1000.0f / diff_last_sample_ms);
+    ClearFrames();
+  }
+
+  void ClearFrames() {
+    frames_.clear();
+  }
+
+  int delay_ms() const {
+    return delay_ms_;
+  }
+
+  int filtered_delay_ms_per_s() const {
+    return static_cast<int>(filtered_delay_ms_per_s_->Value() + 0.5);
+  }
+
+ private:
+  const float kWeightFactor;
+  std::list<int64_t> frames_;
+  int delay_ms_;
+  scoped_ptr<VCMExpFilter> filtered_delay_ms_per_s_;
+};
+
 OveruseFrameDetector::OveruseFrameDetector(Clock* clock,
                                            float normaluse_stddev_ms,
                                            float overuse_stddev_ms)
     : crit_(CriticalSectionWrapper::CreateCriticalSection()),
       normaluse_stddev_ms_(normaluse_stddev_ms),
       overuse_stddev_ms_(overuse_stddev_ms),
+      min_process_count_before_reporting_(kMinProcessCountBeforeReporting),
       observer_(NULL),
       clock_(clock),
       next_process_time_(clock_->TimeInMilliseconds()),
+      num_process_times_(0),
       last_capture_time_(0),
       last_overuse_time_(0),
       checks_above_threshold_(0),
       last_rampup_time_(0),
       in_quick_rampup_(false),
       current_rampup_delay_ms_(kStandardRampUpDelayMs),
-      num_pixels_(0) {}
+      num_pixels_(0),
+      last_capture_jitter_ms_(-1),
+      last_encode_sample_ms_(0),
+      encode_time_(new EncodeTimeAvg()),
+      encode_usage_(new EncodeUsage()),
+      capture_queue_delay_(new CaptureQueueDelay()) {
+}
 
 OveruseFrameDetector::~OveruseFrameDetector() {
 }
@@ -126,22 +270,24 @@ void OveruseFrameDetector::SetObserver(CpuOveruseObserver* observer) {
   observer_ = observer;
 }
 
-void OveruseFrameDetector::FrameCaptured(int width, int height) {
+int OveruseFrameDetector::AvgEncodeTimeMs() const {
   CriticalSectionScoped cs(crit_.get());
+  return encode_time_->filtered_encode_time_ms();
+}
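
The helper classes added above drive one metric: encode usage is the
exponentially filtered encode time divided by the exponentially filtered
inter-frame interval. A minimal self-contained sketch of that arithmetic,
using a plain EWMA as a stand-in for VCMExpFilter (VCMExpFilter additionally
scales its weight by the gap between samples; the alpha values here are
illustrative):

  #include <algorithm>
  #include <cstdio>

  // Exponentially weighted moving average; alpha is the weight kept on
  // history for each new sample.
  struct Ewma {
    float value;
    float alpha;
    void Add(float sample) { value = alpha * value + (1 - alpha) * sample; }
  };

  int main() {
    Ewma encode_ms{5.0f, 0.995f};       // Seeded like kInitialAvgEncodeTimeMs.
    Ewma frame_diff_ms{33.0f, 0.998f};  // Seeded like kSampleDiffMs.
    for (int i = 0; i < 300; ++i) {
      frame_diff_ms.Add(33.0f);  // ~30 fps capture.
      encode_ms.Add(5.0f);       // 5 ms spent encoding each frame.
    }
    // Share of the frame interval spent encoding, in percent.
    float usage =
        100.0f * encode_ms.value / std::max(frame_diff_ms.value, 1.0f);
    std::printf("encode usage: %.0f%%\n", usage);
  }

With these steady inputs the ratio is 5/33, i.e. the 15% that the EncodedUsage
unit test further down expects from the real filter.
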
- num_pixels_ = num_pixels; - capture_deltas_.Reset(); - last_capture_time_ = 0; - } +int OveruseFrameDetector::EncodeUsagePercent() const { + CriticalSectionScoped cs(crit_.get()); + return encode_usage_->UsageInPercent(); +} - int64_t time = clock_->TimeInMilliseconds(); - if (last_capture_time_ != 0) { - capture_deltas_.AddSample(time - last_capture_time_); - } - last_capture_time_ = time; +int OveruseFrameDetector::AvgCaptureQueueDelayMsPerS() const { + CriticalSectionScoped cs(crit_.get()); + return capture_queue_delay_->filtered_delay_ms_per_s(); +} + +int OveruseFrameDetector::CaptureQueueDelayMsPerS() const { + CriticalSectionScoped cs(crit_.get()); + return capture_queue_delay_->delay_ms(); } int32_t OveruseFrameDetector::TimeUntilNextProcess() { @@ -149,6 +295,57 @@ int32_t OveruseFrameDetector::TimeUntilNextProcess() { return next_process_time_ - clock_->TimeInMilliseconds(); } +bool OveruseFrameDetector::DetectFrameTimeout(int64_t now) const { + if (last_capture_time_ == 0) { + return false; + } + return (now - last_capture_time_) > kFrameTimeoutIntervalMs; +} + +void OveruseFrameDetector::FrameCaptured(int width, int height) { + CriticalSectionScoped cs(crit_.get()); + + int64_t now = clock_->TimeInMilliseconds(); + int num_pixels = width * height; + if (num_pixels != num_pixels_ || DetectFrameTimeout(now)) { + // Frame size changed, reset statistics. + num_pixels_ = num_pixels; + capture_deltas_.Reset(); + last_capture_time_ = 0; + capture_queue_delay_->ClearFrames(); + num_process_times_ = 0; + } + + if (last_capture_time_ != 0) { + capture_deltas_.AddSample(now - last_capture_time_); + encode_usage_->AddSample(now - last_capture_time_); + } + last_capture_time_ = now; + + capture_queue_delay_->FrameCaptured(now); +} + +void OveruseFrameDetector::FrameProcessingStarted() { + CriticalSectionScoped cs(crit_.get()); + capture_queue_delay_->FrameProcessingStarted(clock_->TimeInMilliseconds()); +} + +void OveruseFrameDetector::FrameEncoded(int encode_time_ms) { + CriticalSectionScoped cs(crit_.get()); + int64_t time = clock_->TimeInMilliseconds(); + if (last_encode_sample_ms_ != 0) { + int64_t diff_ms = time - last_encode_sample_ms_; + encode_time_->AddEncodeSample(encode_time_ms, diff_ms); + encode_usage_->AddEncodeSample(encode_time_ms, diff_ms); + } + last_encode_sample_ms_ = time; +} + +int OveruseFrameDetector::last_capture_jitter_ms() const { + CriticalSectionScoped cs(crit_.get()); + return last_capture_jitter_ms_; +} + int32_t OveruseFrameDetector::Process() { CriticalSectionScoped cs(crit_.get()); @@ -158,12 +355,20 @@ int32_t OveruseFrameDetector::Process() { if (now < next_process_time_) return 0; + int64_t diff_ms = now - next_process_time_ + kProcessIntervalMs; next_process_time_ = now + kProcessIntervalMs; + ++num_process_times_; // Don't trigger overuse unless we've seen a certain number of frames. if (capture_deltas_.Count() < kMinFrameSampleCount) return 0; + capture_queue_delay_->CalculateDelayChange(diff_ms); + + if (num_process_times_ <= min_process_count_before_reporting_) { + return 0; + } + if (IsOverusing()) { // If the last thing we did was going up, and now have to back down, we need // to check if this peak was short. 
If so we should back off to avoid going
@@ -208,6 +413,7 @@
                overuse_stddev_ms_,
                normaluse_stddev_ms_);
 
+  last_capture_jitter_ms_ = static_cast<int>(capture_deltas_.StdDev() + 0.5);
   return 0;
 }
 
diff --git a/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.h b/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.h
index 4717a6061712..c9f691cc072f 100644
--- a/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.h
+++ b/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector.h
@@ -14,6 +14,7 @@
 #include "webrtc/modules/interface/module.h"
 #include "webrtc/system_wrappers/interface/constructor_magic.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/testsupport/gtest_prod_util.h"
 
 namespace webrtc {
 
@@ -27,8 +28,8 @@ class VCMExpFilter;
 const float kOveruseStdDevMs = 15.0f;
 const float kNormalUseStdDevMs = 7.0f;
 #elif WEBRTC_MAC
-const float kOveruseStdDevMs = 22.0f;
-const float kNormalUseStdDevMs = 12.0f;
+const float kOveruseStdDevMs = 24.0f;
+const float kNormalUseStdDevMs = 14.0f;
 #else
 const float kOveruseStdDevMs = 17.0f;
 const float kNormalUseStdDevMs = 10.0f;
@@ -71,14 +72,61 @@ class OveruseFrameDetector : public Module {
   // Called for each captured frame.
   void FrameCaptured(int width, int height);
 
+  // Called when the processing of a captured frame is started.
+  void FrameProcessingStarted();
+
+  // Called for each encoded frame.
+  void FrameEncoded(int encode_time_ms);
+
+  // Accessors.
+  // The last estimated jitter based on the incoming captured frames.
+  int last_capture_jitter_ms() const;
+
+  // Running average of reported encode time (FrameEncoded()).
+  // Only used for stats.
+  int AvgEncodeTimeMs() const;
+
+  // The average encode time divided by the average time difference between
+  // incoming captured frames.
+  // This variable is currently only used for statistics.
+  int EncodeUsagePercent() const;
+
+  // The current time delay between an incoming captured frame (FrameCaptured())
+  // until the frame is being processed (FrameProcessingStarted()).
+  // (Note: if a new frame is received before an old frame has been processed,
+  // the old frame is skipped).
+  // The delay is returned as the delay in ms per second.
+  // This variable is currently only used for statistics.
+  int AvgCaptureQueueDelayMsPerS() const;
+  int CaptureQueueDelayMsPerS() const;
+
   // Implements Module.
   virtual int32_t TimeUntilNextProcess() OVERRIDE;
   virtual int32_t Process() OVERRIDE;
 
 private:
+  FRIEND_TEST_ALL_PREFIXES(OveruseFrameDetectorTest, TriggerOveruse);
+  FRIEND_TEST_ALL_PREFIXES(OveruseFrameDetectorTest, OveruseAndRecover);
+  FRIEND_TEST_ALL_PREFIXES(OveruseFrameDetectorTest, DoubleOveruseAndRecover);
+  FRIEND_TEST_ALL_PREFIXES(
+      OveruseFrameDetectorTest, TriggerNormalUsageWithMinProcessCount);
+  FRIEND_TEST_ALL_PREFIXES(
+      OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage);
+  FRIEND_TEST_ALL_PREFIXES(OveruseFrameDetectorTest, LastCaptureJitter);
+
+  void set_min_process_count_before_reporting(int64_t count) {
+    min_process_count_before_reporting_ = count;
+  }
+
+  class EncodeTimeAvg;
+  class EncodeUsage;
+  class CaptureQueueDelay;
+
   bool IsOverusing();
   bool IsUnderusing(int64_t time_now);
 
+  bool DetectFrameTimeout(int64_t now) const;
+
+  // Protecting all members.
 scoped_ptr<CriticalSectionWrapper> crit_;
@@ -86,11 +134,14 @@ class OveruseFrameDetector : public Module {
   const float normaluse_stddev_ms_;
   const float overuse_stddev_ms_;
 
+  int64_t min_process_count_before_reporting_;
+
   // Observer getting overuse reports.
   CpuOveruseObserver* observer_;
 
   Clock* clock_;
   int64_t next_process_time_;
+  int64_t num_process_times_;
   Statistics capture_deltas_;
   int64_t last_capture_time_;
@@ -105,6 +156,14 @@ class OveruseFrameDetector : public Module {
   // Number of pixels of last captured frame.
   int num_pixels_;
 
+  int last_capture_jitter_ms_;
+
+  int64_t last_encode_sample_ms_;
+  scoped_ptr<EncodeTimeAvg> encode_time_;
+  scoped_ptr<EncodeUsage> encode_usage_;
+
+  scoped_ptr<CaptureQueueDelay> capture_queue_delay_;
+
   DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
 };
 
diff --git a/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector_unittest.cc b/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector_unittest.cc
index d19956928aff..8d45fdb23cce 100644
--- a/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector_unittest.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/overuse_frame_detector_unittest.cc
@@ -50,11 +50,11 @@ class OveruseFrameDetectorTest : public ::testing::Test {
 
     EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
 
-    InsertFramesWithInterval(50, regular_frame_interval_ms);
+    InsertFramesWithInterval(200, regular_frame_interval_ms);
     InsertFramesWithInterval(50, 110);
     overuse_detector_->Process();
 
-    InsertFramesWithInterval(50, regular_frame_interval_ms);
+    InsertFramesWithInterval(200, regular_frame_interval_ms);
     InsertFramesWithInterval(50, 110);
     overuse_detector_->Process();
   }
@@ -74,25 +74,112 @@ class OveruseFrameDetectorTest : public ::testing::Test {
 };
 
 TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
+  overuse_detector_->set_min_process_count_before_reporting(0);
   TriggerOveruse();
 }
 
 TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
+  overuse_detector_->set_min_process_count_before_reporting(0);
   TriggerOveruse();
   TriggerNormalUsage();
 }
 
TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
+  overuse_detector_->set_min_process_count_before_reporting(0);
   TriggerOveruse();
   TriggerOveruse();
   TriggerNormalUsage();
 }
 
+TEST_F(OveruseFrameDetectorTest, TriggerNormalUsageWithMinProcessCount) {
+  overuse_detector_->set_min_process_count_before_reporting(1);
+  InsertFramesWithInterval(900, 33);
+  overuse_detector_->Process();
+  EXPECT_EQ(-1, overuse_detector_->last_capture_jitter_ms());
+  clock_->AdvanceTimeMilliseconds(5000);
+  overuse_detector_->Process();
+  EXPECT_GT(overuse_detector_->last_capture_jitter_ms(), 0);
+}
+
 TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
+  overuse_detector_->set_min_process_count_before_reporting(0);
   EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
   for(size_t i = 0; i < 64; ++i)
     TriggerOveruse();
 }
 
+TEST_F(OveruseFrameDetectorTest, LastCaptureJitter) {
+  overuse_detector_->set_min_process_count_before_reporting(0);
+  EXPECT_EQ(-1, overuse_detector_->last_capture_jitter_ms());
+  TriggerOveruse();
+  EXPECT_GT(overuse_detector_->last_capture_jitter_ms(), 0);
+}
+
+TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) {
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0);
+  overuse_detector_->FrameCaptured(320, 180);
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0);
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureQueueDelay) {
+  overuse_detector_->FrameCaptured(320, 180);
+  clock_->AdvanceTimeMilliseconds(100);
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayMultipleFrames) {
+  overuse_detector_->FrameCaptured(320, 180);
+  clock_->AdvanceTimeMilliseconds(10);
+  overuse_detector_->FrameCaptured(320, 180);
+  clock_->AdvanceTimeMilliseconds(20);
+
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 30);
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 20);
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayResetAtResolutionSwitch) {
+  overuse_detector_->FrameCaptured(320, 180);
+  clock_->AdvanceTimeMilliseconds(10);
+  overuse_detector_->FrameCaptured(321, 180);
+  clock_->AdvanceTimeMilliseconds(20);
+
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 20);
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
+  overuse_detector_->FrameCaptured(320, 180);
+  clock_->AdvanceTimeMilliseconds(100);
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
+  // No new captured frame. The last delay should be reported.
+  overuse_detector_->FrameProcessingStarted();
+  EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
+}
+
+TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
+  const int kInitialAvgEncodeTimeInMs = 5;
+  EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->AvgEncodeTimeMs());
+  for (int i = 0; i < 30; i++) {
+    clock_->AdvanceTimeMilliseconds(33);
+    overuse_detector_->FrameEncoded(2);
+  }
+  EXPECT_EQ(2, overuse_detector_->AvgEncodeTimeMs());
+}
+
+TEST_F(OveruseFrameDetectorTest, EncodedUsage) {
+  for (int i = 0; i < 30; i++) {
+    overuse_detector_->FrameCaptured(320, 180);
+    clock_->AdvanceTimeMilliseconds(5);
+    overuse_detector_->FrameEncoded(5);
+    clock_->AdvanceTimeMilliseconds(33-5);
+  }
+  EXPECT_EQ(15, overuse_detector_->EncodeUsagePercent());
+}
+
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc b/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc
index 91cd7638d02e..6192dfa5bf22 100644
--- a/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc
@@ -12,7 +12,7 @@
 #include
 #include
-#include
+#include
 #include
 
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/AndroidManifest.xml b/media/webrtc/trunk/webrtc/video_engine/test/android/AndroidManifest.xml
deleted file mode 100644
index 39a3c515e456..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/AndroidManifest.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-[28 deleted lines: the demo's AndroidManifest.xml. Its XML markup is not
-recoverable from this copy of the patch.]
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/OWNERS b/media/webrtc/trunk/webrtc/video_engine/test/android/OWNERS
deleted file mode 100644
index ec8dd5331fa8..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-leozwang@webrtc.org
-fischman@webrtc.org
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/android_video_demo.gypi b/media/webrtc/trunk/webrtc/video_engine/test/android/android_video_demo.gypi
deleted file mode 100644
index fa53e511025b..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/android_video_demo.gypi
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
-  'targets': [
-    {
-      'target_name': 'libwebrtc-video-demo-jni',
-      'type': 'loadable_module',
-      'dependencies': [
-        '<(webrtc_root)/modules/modules.gyp:*',
-        '<(webrtc_root)/test/test.gyp:channel_transport',
-        '<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
-        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
-      ],
-      'sources': [
-        'jni/android_media_codec_decoder.cc',
-        'jni/vie_android_java_api.cc',
-      ],
-      'link_settings': {
-        'libraries': [
-          '-llog',
-          '-lGLESv2',
-          '-lOpenSLES',
-        ],
-      }
-    },
-    {
-      'target_name': 'WebRTCDemo',
-      'type': 'none',
-      'dependencies': [
-        'libwebrtc-video-demo-jni',
-        '<(modules_java_gyp_path):*',
-      ],
-      'actions': [
-        {
-          # TODO(yujie.mao): Convert building of the demo to a proper GYP target
-          # so this action is not needed once chromium's apk-building machinery
-          # can be used. (crbug.com/225101)
-          'action_name': 'build_webrtcdemo_apk',
-          'variables': {
-            'android_webrtc_demo_root': '<(webrtc_root)/video_engine/test/android',
-          },
-          'inputs' : [
-            '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
-            '<(PRODUCT_DIR)/lib.java/video_capture_module_java.jar',
-            '<(PRODUCT_DIR)/lib.java/video_render_module_java.jar',
-            '<(PRODUCT_DIR)/libwebrtc-video-demo-jni.so',
-[The rest of this .gypi action and the deleted Ant build files for the demo
-lost their XML markup in this copy of the patch; only the text content
-survives. The surviving fragments, in order:]
-      @{elseText}
-      Running tests ...
-      Uninstalling @{app.package} from the default emulator or device...
-      Project Name: ${ant.project.name}
-      Switching between debug and non debug build: Deleting previous compilation output...
-      Switching from instrumented to non-instrumented build: Deleting previous compilation output...
-      Resolving Build Target for ${ant.project.name}...
-      ----------
-      Creating output directories if needed...
-      ----------
-      Resolving Dependencies for ${ant.project.name}...
-      ----------
-      Building Libraries with '${project.libraries.target}'...
-      ----------
-      Building tested project at ${tested.project.absolute.dir} with '${tested.project.target}'...
-      ----------
-      Handling aidl files...
-      ----------
-      Handling RenderScript files...
-      ----------
-      Handling Resources...
-      ----------
-      Handling BuildConfig class...
-      Instrumenting classes from ${out.absolute.dir}/classes...
-      Creating library output jar file...
-      Custom jar packaging exclusion: ${android.package.excludes}
-      -include "${proguard.configcmd}"
-      -include "${out.absolute.dir}/proguard.txt"
-      -injars ${project.all.classes.value}
-      -outjars "${obfuscated.jar.file}"
-      -libraryjars ${project.target.classpath.value}
-      -dump "${obfuscate.absolute.dir}/dump.txt"
-      -printseeds "${obfuscate.absolute.dir}/seeds.txt"
-      -printusage "${obfuscate.absolute.dir}/usage.txt"
-      -printmapping "${obfuscate.absolute.dir}/mapping.txt"
-      Debug Package: ${out.final.file}
-      No key.store and key.alias properties found in build.properties.
-      Please sign ${out.packaged.file} manually
-      and run zipalign from the Android SDK tools.
-      proguard.config is ${proguard.config}
-      Proguard.config is enabled
-      *************************************************
-      ****  Android Manifest has debuggable=true  ****
-      **** Doing DEBUG packaging with RELEASE keys ****
-      *************************************************
-      Signing final apk...
-      Release Package: ${out.final.file}
-      Instrumented Package: ${out.final.file}
-      WARNING: Code Coverage is currently only supported on the emulator and rooted devices.
-      Downloading coverage file into project directory...
-      Extracting coverage report...
-      Cleaning up temporary files...
-      Saving the report file in ${out.absolute.dir}/coverage.html
-      Installing ${out.final.file} onto default emulator or device...
-      Install file not specified.
-      'ant install' now requires the build target to be specified as well.
-          ant debug install
-          ant release install
-          ant instrument install
-      This will build the given package and install it.
-      Alternatively, you can use
-          ant installd
-          ant installr
-          ant installi
-          ant installt
-      to only install an existing package (this will not rebuild the package.)
-      Android Ant Build. Available targets:
-         help:      Displays this help.
-         clean:     Removes output files created by other targets.
-                    The 'all' target can be used to clean dependencies
-                    (tested projects and libraries)at the same time
-                    using: 'ant all clean'
-         debug:     Builds the application and signs it with a debug key.
-                    The 'nodeps' target can be used to only build the
-                    current project and ignore the libraries using:
-                    'ant nodeps debug'
-         release:   Builds the application. The generated apk file must be
-                    signed before it is published.
-                    The 'nodeps' target can be used to only build the
-                    current project and ignore the libraries using:
-                    'ant nodeps release'
-         instrument:Builds an instrumented package and signs it with a
-                    debug key.
-         test:      Runs the tests. Project must be a test project and
-                    must have been built. Typical usage would be:
-                        ant [emma] debug install test
-         emma:      Transiently enables code coverage for subsequent
-                    targets.
-         install:   Installs the newly build package. Must either be used
-                    in conjunction with a build target (debug/release/
-                    instrument) or with the proper suffix indicating
-                    which package to install (see below).
-                    If the application was previously installed, the
-                    application is reinstalled if the signature matches.
-         installd:  Installs (only) the debug package.
-         installr:  Installs (only) the release package.
-         installi:  Installs (only) the instrumented package.
-         installt:  Installs (only) the test and tested packages (unless
-                    nodeps is used as well.
-         uninstall: Uninstalls the application from a running emulator or
-                    device. Also uninstall tested package if applicable
-                    unless 'nodeps' is used as well.
-
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/android_media_codec_decoder.cc b/media/webrtc/trunk/webrtc/video_engine/test/android/jni/android_media_codec_decoder.cc
deleted file mode 100644
index 9e45c8e7f720..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/android_media_codec_decoder.cc
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <android/log.h>
-#define LOG_TAG "AndroidMediaCodecDecoder"
-
-#include <assert.h>
-
-#include "webrtc/video_engine/test/android/jni/android_media_codec_decoder.h"
-
-namespace webrtc {
-
-AndroidMediaCodecDecoder::AndroidMediaCodecDecoder(
-    JavaVM* vm, jobject surface, jclass decoderClass)
-    : vm_(vm),
-      surface_(NULL),
-      mediaCodecDecoder_(NULL),
-      decoderClass_(NULL),
-      env_(NULL),
-      setEncodedImageID_(NULL),
-      vm_attached_(false) {
-  Initialize(vm, surface, decoderClass);
-}
-
-AndroidMediaCodecDecoder::~AndroidMediaCodecDecoder() {
-  env_->DeleteGlobalRef(decoderClass_);
-  env_->DeleteGlobalRef(surface_);
-}
-
-void AndroidMediaCodecDecoder::Initialize(
-    JavaVM* vm, jobject surface, jclass decoderClass) {
-  int ret = vm->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4);
-  if ((ret < 0) || !env_) {
-    __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
-                        "Could not get JNI env (%d, %p)", ret, env_);
-    assert(false);
-  }
-  surface_ = env_->NewGlobalRef(surface);
-  decoderClass_ = reinterpret_cast<jclass>(env_->NewGlobalRef(decoderClass));
-}
-
-int32_t AndroidMediaCodecDecoder::InitDecode(
-    const VideoCodec* codecSettings, int32_t numberOfCores) {
-  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
-
-  // TODO(dwkang): Detach this thread from VM. => this leads to a crash on
-  //               "StopCall".
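
The lines that follow attach the native thread to the JVM and then instantiate
the Java decoder through its no-argument constructor; in JNI a constructor is
looked up under the special method name "<init>" with a void signature. A
small standalone sketch of that attach-then-construct sequence (assuming only
a JavaVM* and an already-resolved jclass):

  #include <jni.h>

  // Create an instance of 'cls' via its no-arg constructor.
  // Returns a local reference, or NULL on failure.
  jobject NewInstance(JavaVM* vm, jclass cls) {
    JNIEnv* env = NULL;
    // Attach the current native thread; a no-op if already attached.
    if (vm->AttachCurrentThread(&env, NULL) != JNI_OK || env == NULL)
      return NULL;
    // Constructors are named "<init>"; "()V" means no args, void return.
    jmethodID ctor = env->GetMethodID(cls, "<init>", "()V");
    if (ctor == NULL)
      return NULL;
    return env->NewObject(cls, ctor);
  }
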
-  int ret = vm_->AttachCurrentThread(&env_, NULL);
-  // Get the JNI env for this thread
-  if ((ret < 0) || !env_) {
-    __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG,
-                        "Could not attach thread to JVM (%d, %p)", ret,
-                        env_);
-    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  } else {
-    vm_attached_ = true;
-  }
-
-  // Initialize the media codec java decoder class.
-  jmethodID mid = env_->GetMethodID(decoderClass_, "<init>", "()V");
-  mediaCodecDecoder_ = env_->NewGlobalRef(env_->NewObject(decoderClass_, mid));
-
-  mid = env_->GetMethodID(
-      decoderClass_, "configure", "(Landroid/view/SurfaceView;II)V");
-  env_->CallVoidMethod(mediaCodecDecoder_, mid, surface_,
-                       codecSettings->width, codecSettings->height);
-
-  setEncodedImageID_ = env_->GetMethodID(
-      decoderClass_, "setEncodedImage", "(Ljava/nio/ByteBuffer;J)V");
-
-  // Call start()
-  jmethodID startID = env_->GetMethodID(decoderClass_, "start", "()V");
-  env_->CallVoidMethod(mediaCodecDecoder_, startID);
-  return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t AndroidMediaCodecDecoder::Decode(
-    const EncodedImage& inputImage,
-    bool missingFrames,
-    const RTPFragmentationHeader* fragmentation,
-    const CodecSpecificInfo* codecSpecificInfo,
-    int64_t renderTimeMs) {
-  if (!vm_attached_) {
-    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  }
-
-  jobject byteBuffer =
-      env_->NewDirectByteBuffer(inputImage._buffer, inputImage._length);
-  env_->CallVoidMethod(
-      mediaCodecDecoder_, setEncodedImageID_, byteBuffer, renderTimeMs);
-  env_->DeleteLocalRef(byteBuffer);
-
-  return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-}
-
-int32_t AndroidMediaCodecDecoder::RegisterDecodeCompleteCallback(
-    DecodedImageCallback* callback) {
-  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
-  return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t AndroidMediaCodecDecoder::Release() {
-  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
-  env_->DeleteGlobalRef(mediaCodecDecoder_);
-  mediaCodecDecoder_ = NULL;
-
-  return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t AndroidMediaCodecDecoder::Reset() {
-  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
-  return WEBRTC_VIDEO_CODEC_OK;
-}
-
-}  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/android_media_codec_decoder.h b/media/webrtc/trunk/webrtc/video_engine/test/android/jni/android_media_codec_decoder.h
deleted file mode 100644
index 5fd242142d5e..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/android_media_codec_decoder.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
-
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-
-namespace webrtc {
-
-class AndroidMediaCodecDecoder : public VideoDecoder {
- public:
-  AndroidMediaCodecDecoder(JavaVM* vm, jobject surface, jclass decoderClass);
-  virtual ~AndroidMediaCodecDecoder();
-
-  // Initialize the decoder with the information from the VideoCodec.
-  //
-  // Input:
-  //        - inst              : Codec settings
-  //        - numberOfCores     : Number of cores available for the decoder
-  //
-  // Return value              : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-  virtual int32_t InitDecode(
-      const VideoCodec* codecSettings, int32_t numberOfCores);
-
-  // Decode encoded image (as a part of a video stream). The decoded image
-  // will be returned to the user through the decode complete callback.
-  //
-  // Input:
-  //        - inputImage        : Encoded image to be decoded
-  //        - missingFrames     : True if one or more frames have been lost
-  //                              since the previous decode call.
-  //        - fragmentation     : Specifies where the encoded frame can be
-  //                              split into separate fragments. The meaning
-  //                              of fragment is codec specific, but often
-  //                              means that each fragment is decodable by
-  //                              itself.
-  //        - codecSpecificInfo : Pointer to codec specific data
-  //        - renderTimeMs      : System time to render in milliseconds. Only
-  //                              used by decoders with internal rendering.
-  //
-  // Return value              : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-  virtual int32_t
-  Decode(const EncodedImage& inputImage,
-         bool missingFrames,
-         const RTPFragmentationHeader* fragmentation,
-         const CodecSpecificInfo* codecSpecificInfo = NULL,
-         int64_t renderTimeMs = -1);
-
-  // Register an decode complete callback object.
-  //
-  // Input:
-  //        - callback          : Callback object which handles decoded images.
-  //
-  // Return value              : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-  virtual int32_t RegisterDecodeCompleteCallback(
-      DecodedImageCallback* callback);
-
-  // Free decoder memory.
-  //
-  // Return value              : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-  virtual int32_t Release();
-
-  // Reset decoder state and prepare for a new call.
-  //
-  // Return value              : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-  virtual int32_t Reset();
-
-  // Codec configuration data sent out-of-band, i.e. in SIP call setup
-  //
-  // Input/Output:
-  //        - buffer            : Buffer pointer to the configuration data
-  //        - size              : The size of the configuration data in
-  //                              bytes
-  //
-  // Return value              : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-  virtual int32_t SetCodecConfigParameters(
-      const uint8_t* /*buffer*/, int32_t /*size*/) {
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-
-  // Create a copy of the codec and its internal state.
-  //
-  // Return value              : A copy of the instance if OK, NULL otherwise.
-  virtual VideoDecoder* Copy() { return NULL; }
-
- private:
-  void Initialize(JavaVM* vm, jobject surface, jclass decoderClass);
-
-  JavaVM* vm_;
-  jobject surface_;
-  jobject mediaCodecDecoder_;
-  jclass decoderClass_;
-  JNIEnv* env_;
-  jmethodID setEncodedImageID_;
-  bool vm_attached_;
-};
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h b/media/webrtc/trunk/webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h
deleted file mode 100644
index 7fad82b410c4..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h
+++ /dev/null
@@ -1,495 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* DO NOT EDIT THIS FILE - it is machine generated */ -#include -/* Header for class org_webrtc_videoengineapp_ViEAndroidJavaAPI */ - -#ifndef _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI -#define _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI -#ifdef __cplusplus -extern "C" { -#endif -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: NativeInit - * Signature: (Landroid/content/Context;)Z - */ -JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit - (JNIEnv *, jobject, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: GetVideoEngine - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: Init - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init - (JNIEnv *, jobject, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: Terminate - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopRender - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartReceive - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopReceive - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: CreateChannel - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetLocalReceiver - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver - (JNIEnv *, jobject, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetSendDestination - * Signature: (IILjava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination - (JNIEnv *, jobject, jint, jint, jstring); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: GetCodecs - * Signature: ()[Ljava/lang/String; - */ -JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetReceiveCodec - * Signature: (IIIIII)I - */ -JNIEXPORT jint JNICALL 
Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec - (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetSendCodec - * Signature: (IIIIII)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec - (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: AddRemoteRenderer - * Signature: (ILjava/lang/Object;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer - (JNIEnv *, jobject, jint, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: RemoveRemoteRenderer - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartRender - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartCamera - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera - (JNIEnv *, jobject, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopCamera - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: GetCameraOrientation - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetRotation - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation - (JNIEnv *, jobject, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetExternalMediaCodecDecoderRenderer - * Signature: (ILjava/lang/Object;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer - (JNIEnv *, jobject, jint, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: EnableNACK - * Signature: (IZ)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK - (JNIEnv *, jobject, jint, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: EnablePLI - * Signature: (IZ)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI - (JNIEnv *, jobject, jint, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetCallback - * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback - (JNIEnv *, jobject, jint, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartIncomingRTPDump - * Signature: (ILjava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump - (JNIEnv *, jobject, jint, jstring); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopIncomingRTPDump - * Signature: (I)I - */ -JNIEXPORT jint JNICALL 
Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Create - * Signature: (Landroid/content/Context;)Z - */ -JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create - (JNIEnv *, jobject, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Delete - * Signature: ()Z - */ -JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Init - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init - (JNIEnv *, jobject, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Terminate - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_CreateChannel - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_DeleteChannel - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: ViE_DeleteChannel - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_ViE_1DeleteChannel - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetLocalReceiver - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver - (JNIEnv *, jobject, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetSendDestination - * Signature: (IILjava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination - (JNIEnv *, jobject, jint, jint, jstring); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartListen - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartPlayout - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopListen - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopPlayout - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL 
Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetSpeakerVolume - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetLoudspeakerStatus - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus - (JNIEnv *, jobject, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartPlayingFileLocally - * Signature: (ILjava/lang/String;Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally - (JNIEnv *, jobject, jint, jstring, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopPlayingFileLocally - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartPlayingFileAsMicrophone - * Signature: (ILjava/lang/String;Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone - (JNIEnv *, jobject, jint, jstring, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopPlayingFileAsMicrophone - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone - (JNIEnv *, jobject, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_NumOfCodecs - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_GetCodecs - * Signature: ()[Ljava/lang/String; - */ -JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetSendCodec - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec - (JNIEnv *, jobject, jint, jint); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetECStatus - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus - (JNIEnv *, jobject, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetAGCStatus - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus - (JNIEnv *, jobject, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetNSStatus - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus - (JNIEnv *, jobject, jboolean); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartDebugRecording - * Signature: (Ljava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording - (JNIEnv *, jobject, jstring); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopDebugRecording - * Signature: ()I - */ -JNIEXPORT jint JNICALL 
Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording - (JNIEnv *, jobject); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartIncomingRTPDump - * Signature: (ILjava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump - (JNIEnv *, jobject, jint, jstring); - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopIncomingRTPDump - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump - (JNIEnv *, jobject, jint); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/vie_android_java_api.cc b/media/webrtc/trunk/webrtc/video_engine/test/android/jni/vie_android_java_api.cc deleted file mode 100644 index d1475d204acc..000000000000 --- a/media/webrtc/trunk/webrtc/video_engine/test/android/jni/vie_android_java_api.cc +++ /dev/null @@ -1,2008 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include - -#include "webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h" - -#include "webrtc/voice_engine/include/voe_audio_processing.h" -#include "webrtc/voice_engine/include/voe_base.h" -#include "webrtc/voice_engine/include/voe_codec.h" -#include "webrtc/voice_engine/include/voe_file.h" -#include "webrtc/voice_engine/include/voe_hardware.h" -#include "webrtc/voice_engine/include/voe_network.h" -#include "webrtc/voice_engine/include/voe_rtp_rtcp.h" -#include "webrtc/voice_engine/include/voe_volume_control.h" - -#include "webrtc/video_engine/include/vie_base.h" -#include "webrtc/video_engine/include/vie_capture.h" -#include "webrtc/video_engine/include/vie_codec.h" -#include "webrtc/video_engine/include/vie_external_codec.h" -#include "webrtc/video_engine/include/vie_network.h" -#include "webrtc/video_engine/include/vie_render.h" -#include "webrtc/video_engine/include/vie_rtp_rtcp.h" - -#include "webrtc/common_types.h" -#include "webrtc/video_engine/test/android/jni/android_media_codec_decoder.h" - -#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h" -#include "webrtc/system_wrappers/interface/scoped_ptr.h" -#include "webrtc/test/channel_transport/include/channel_transport.h" - -#define WEBRTC_LOG_TAG "*WEBRTCN*" -#define VALIDATE_BASE_POINTER \ - if (!voeData.base) \ - { \ - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "Base pointer doesn't exist"); \ - return -1; \ - } -#define VALIDATE_CODEC_POINTER \ - if (!voeData.codec) \ - { \ - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "Codec pointer doesn't exist"); \ - return -1; \ - } -#define VALIDATE_FILE_POINTER \ - if (!voeData.file) \ - { \ - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "File pointer doesn't exist"); \ - return -1; \ - } -#define VALIDATE_APM_POINTER \ - if (!voeData.codec) \ - { \ - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "Apm pointer doesn't exist"); \ - return -1; \ - } -#define VALIDATE_HARDWARE_POINTER \ - if (!voeData.hardware) \ - { \ - __android_log_write( \ - 
ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "Hardware pointer doesn't exist"); \ - return -1; \ - } -#define VALIDATE_VOLUME_POINTER \ - if (!voeData.volume) \ - { \ - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "Volume pointer doesn't exist"); \ - return -1; \ - } - -#define VALIDATE_RTP_POINTER \ - if (!voeData.rtp) \ - { \ - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "rtp pointer doesn't exist"); \ - return -1; \ - } - -using namespace webrtc; - -//Forward declaration. -class VideoCallbackAndroid; - -// VoiceEngine data struct -typedef struct -{ - // VoiceEngine - VoiceEngine* ve; - // Sub-APIs - VoEBase* base; - VoECodec* codec; - VoEFile* file; - VoENetwork* netw; - VoEAudioProcessing* apm; - VoEVolumeControl* volume; - VoEHardware* hardware; - VoERTP_RTCP* rtp; - - JavaVM* jvm; - scoped_ptr<test::VoiceChannelTransport> transport; -} VoiceEngineData; - -class AndroidVideoRenderCallback; -// VideoEngine data struct -typedef struct -{ - VideoEngine* vie; - ViEBase* base; - ViECodec* codec; - ViENetwork* netw; - ViERTP_RTCP* rtp; - ViERender* render; - ViECapture* capture; - ViEExternalCodec* externalCodec; - - VideoCallbackAndroid* callback; - scoped_ptr<test::VideoChannelTransport> transport; -} VideoEngineData; - -// Global variables -JavaVM* webrtcGlobalVM; - -// Global variables visible in this file -static VoiceEngineData voeData; -static VideoEngineData vieData; - -// "Local" functions (i.e. not Java accessible) -#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024 -static bool VE_GetSubApis(); -static bool VE_ReleaseSubApis(); - -#define CHECK_API_RETURN(ret) \ - if (ret!=0) \ - { \ - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \ - "Return error %d",ret); \ - break; \ - } - -class VideoCallbackAndroid: public ViEDecoderObserver, - public ViEEncoderObserver -{ - - // Implements ViEDecoderObserver - virtual void IncomingRate(const int videoChannel, - const unsigned int framerate, - const unsigned int bitrate) - { - // Let's print out the network statistics from this call back as well - unsigned short fraction_lost; - unsigned int dummy; - int intdummy; - _vieData.rtp->GetReceivedRTCPStatistics(videoChannel, fraction_lost, - dummy, dummy, dummy, intdummy); - unsigned short packetLossRate = 0; - if (fraction_lost > 0) - { - // Change from frac to % - packetLossRate = (fraction_lost * 100) >> 8; - } - - JNIEnv* threadEnv = NULL; - int ret = webrtcGlobalVM->AttachCurrentThread(&threadEnv, NULL); - // Get the JNI env for this thread - if ((ret < 0) || !threadEnv) - { - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "Could not attach thread to JVM (%d, %p)", ret, - threadEnv); - return; - } - threadEnv->CallIntMethod(_callbackObj, _callbackId, framerate, bitrate, - packetLossRate, _frameRateO, _bitRateO); - webrtcGlobalVM->DetachCurrentThread(); - } - ; - - virtual void IncomingCodecChanged(const int videoChannel, - const webrtc::VideoCodec& videoCodec) - { - JNIEnv* threadEnv = NULL; - int ret = webrtcGlobalVM->AttachCurrentThread(&threadEnv, NULL); - // Get the JNI env for this thread - if ((ret < 0) || !threadEnv) - { - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "Could not attach thread to JVM (%d, %p)", ret, - threadEnv); - return; - } - threadEnv->CallIntMethod(_callbackObj, _incomingResolutionId, - videoCodec.width, videoCodec.height); - webrtcGlobalVM->DetachCurrentThread(); - } - ; - - virtual void RequestNewKeyFrame(const int videoChannel) - { - } - ; - - virtual void OutgoingRate(const int videoChannel, - const unsigned int framerate, - const unsigned int bitrate) - { - _frameRateO =
framerate; - _bitRateO = bitrate; - //__android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - // "SendRate frameRate %d bitrate %d\n",frameRate,bitrate); - } - ; - -public: - VideoEngineData& _vieData; - JNIEnv * _env; - jobject _callbackObj; - jclass _callbackCls; - jmethodID _callbackId; - jmethodID _incomingResolutionId; - int _frameRateO, _bitRateO; - VideoCallbackAndroid(VideoEngineData& vieData, JNIEnv * env, - jobject callback) : - _vieData(vieData), _env(env), _callbackObj(callback), - _frameRateO(0), _bitRateO(0) { - _callbackCls = _env->GetObjectClass(_callbackObj); - _callbackId - = _env->GetMethodID(_callbackCls, "updateStats", "(IIIII)I"); - _incomingResolutionId - = _env->GetMethodID(_callbackCls, "newIncomingResolution", "(II)I"); - if (_callbackId == NULL) { - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to get jid"); - } - _callbackObj = _env->NewGlobalRef(_callbackObj); - } -}; - -// JNI_OnLoad -jint JNI_OnLoad(JavaVM* vm, void* reserved) { - webrtcGlobalVM = vm; - if (!webrtcGlobalVM) - { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "JNI_OnLoad did not receive a valid VM pointer"); - return -1; - } - - // Get JNI - JNIEnv* env; - if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env), - JNI_VERSION_1_4)) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "JNI_OnLoad could not get JNI env"); - return -1; - } - - // Init VoiceEngine data - memset(&voeData, 0, sizeof(voeData)); - // Store the JVM - voeData.jvm = vm; - - // Init VideoEngineData data - memset(&vieData, 0, sizeof(vieData)); - - return JNI_VERSION_1_4; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: NativeInit - * Signature: (Landroid/content/Context;)Z - */ -JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit( - JNIEnv * env, - jobject, - jobject context) -{ - return true; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: GetVideoEngine - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine( - JNIEnv *, - jobject context) { - - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "GetVideoEngine"); - - // Check if already got - if (vieData.vie) { - __android_log_write(ANDROID_LOG_INFO, WEBRTC_LOG_TAG, - "ViE already got"); - return 0; - } - - VideoEngine::SetAndroidObjects(webrtcGlobalVM, context); - - // Create - vieData.vie = VideoEngine::Create(); - if (!vieData.vie) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, "Get ViE failed"); - return -1; - } - vieData.base = ViEBase::GetInterface(vieData.vie); - if (!vieData.base) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get base sub-API failed"); - return -1; - } - - vieData.codec = ViECodec::GetInterface(vieData.vie); - if (!vieData.codec) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get codec sub-API failed"); - return -1; - } - - vieData.netw = ViENetwork::GetInterface(vieData.vie); - if (!vieData.netw) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get network sub-API failed"); - return -1; - } - - vieData.rtp = ViERTP_RTCP::GetInterface(vieData.vie); - if (!vieData.rtp) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get RTP sub-API failed"); - return -1; - } - - vieData.render = ViERender::GetInterface(vieData.vie); - if (!vieData.render) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get Render sub-API failed"); - return -1; - } - - vieData.capture =
ViECapture::GetInterface(vieData.vie); - if (!vieData.capture) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get Capture sub-API failed"); - return -1; - } - - vieData.externalCodec = ViEExternalCodec::GetInterface(vieData.vie); - if (!vieData.capture) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Get External Codec sub-API failed"); - return -1; - } - - return 0; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: Init - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init( - JNIEnv *, - jobject, - jboolean enableTrace) -{ - if (vieData.vie) { - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Init"); - - int ret = vieData.base->Init(); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "Init return %d", ret); - if (enableTrace) - { - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "SetTraceFile"); - if (0 != vieData.vie->SetTraceFile(("/sdcard/trace.txt"), false)) - { - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Video Engine could not enable trace"); - } - - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "SetTraceFilter"); - if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceError)) - { - __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG, - "Could not set trace filter"); - } - } - else - { - if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceNone)) - { - __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG, - "Could not set trace filter"); - } - } - if (voeData.ve) // VoiceEngine is enabled - { - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "SetVoiceEngine"); - if (0 != vieData.base->SetVoiceEngine(voeData.ve)) - { - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "SetVoiceEngine failed"); - } - } - return ret; - } - else - { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: Terminate - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate( - JNIEnv *, - jobject) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Terminate"); - - if (vieData.vie) { - if (!vieData.rtp || vieData.rtp->Release() != 0) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release RTP sub-API"); - } - - if (!vieData.netw || vieData.netw->Release() != 0) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release Network sub-API"); - } - - if (!vieData.codec || vieData.codec->Release() != 0) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release Codec sub-API"); - } - - if (!vieData.render || vieData.render->Release()) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release Render sub-API"); - } - - if (!vieData.capture || vieData.capture->Release()) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release Capture sub-API"); - } - - if (!vieData.base || vieData.base->Release() != 0) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release Base sub-API"); - } - - if (!vieData.externalCodec || vieData.externalCodec->Release()) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to release External Codec sub-API"); - } - - // Delete Vie - if (!VideoEngine::Delete(vieData.vie)) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Failed to delete ViE "); - return -1; - } - memset(&vieData, 0, sizeof(vieData)); - return 0; - } - else { - return -1; - } -} - -/* - * Class: 
org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend"); - - if (vieData.base) { - int ret = vieData.base->StartSend(channel); - return ret; - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopRender - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopRender"); - - if (vieData.render) { - return vieData.render->StopRender(channel); - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopSend"); - - if (vieData.base) { - return vieData.base->StopSend(channel); - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartReceive - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartReceive"); - - if (vieData.base) { - return vieData.base->StartReceive(channel); - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopReceive - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopReceive"); - if (vieData.base) { - return vieData.base->StopReceive(channel); - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: CreateChannel - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel( - JNIEnv *, - jobject, - jint voiceChannel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "CreateChannel"); - - if (vieData.vie) { - int channel = 0; - if (vieData.base->CreateChannel(channel) != 0) { - return -1; - } - if (voiceChannel >= 0) { - vieData.base->ConnectAudioChannel(channel, voiceChannel); - } - vieData.transport.reset(new test::VideoChannelTransport(vieData.netw, - channel)); - return channel; - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetLocalReceiver - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver( - JNIEnv *, - jobject, - jint channel, - jint port) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver"); - - if (vieData.transport.get()) { - return vieData.transport->SetLocalReceiver(port); - } - return -1; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetSendDestination - * Signature: (IILjava/lang/String)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination( - JNIEnv * env, - jobject, - jint channel, - jint port, - jstring ipaddr) -{ - - if (NULL == vieData.vie) - return -1; - - const char* ip = env->GetStringUTFChars(ipaddr, 
NULL); - if (!ip) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Could not get UTF string"); - return -1; - } - - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "SetSendDestination: channel=%d, port=%d, ip=%s\n", - channel, port, ip); - - if (vieData.transport.get()) { - return vieData.transport->SetSendDestination(ip, port); - } - return -1; -} - - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetReceiveCodec - * Signature: (IIIIII)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec( - JNIEnv *, - jobject, - jint channel, - jint codecNum, - jint intbitRate, - jint width, - jint height, - jint frameRate) -{ - if (NULL == vieData.codec) - return -1; - - //Create codec - webrtc::VideoCodec codec; - vieData.codec->GetCodec(codecNum, codec); - - __android_log_print( - ANDROID_LOG_DEBUG, - WEBRTC_LOG_TAG, - "SetReceiveCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d," - " width=%d, height=%d, frameRate=%d \n", - codec.plName, codec.plType, codec.startBitrate, - codec.maxBitrate, codec.width, codec.height, - codec.maxFramerate); - int ret = vieData.codec->SetReceiveCodec(channel, codec); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "SetReceiveCodec return %d", ret); - return ret; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetSendCodec - * Signature: (IIIIII)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec( - JNIEnv *, - jobject, - jint channel, - jint codecNum, - jint intbitRate, - jint width, - jint height, - jint frameRate) -{ - if (NULL == vieData.codec) - return -1; - - //Create codec - webrtc::VideoCodec codec; - vieData.codec->GetCodec(codecNum, codec); - codec.startBitrate = intbitRate; - codec.maxBitrate = 600; - codec.width = width; - codec.height = height; - codec.maxFramerate = frameRate; - - for (int i = 0; i < vieData.codec->NumberOfCodecs(); ++i) { - webrtc::VideoCodec codecToList; - vieData.codec->GetCodec(i, codecToList); - __android_log_print( - ANDROID_LOG_DEBUG, - WEBRTC_LOG_TAG, - "Codec list %s, pltype=%d, bitRate=%d, maxBitRate=%d," - " width=%d, height=%d, frameRate=%d\n", - codecToList.plName, codecToList.plType, - codecToList.startBitrate, codecToList.maxBitrate, - codecToList.width, codecToList.height, - codecToList.maxFramerate); - } - __android_log_print( - ANDROID_LOG_DEBUG, - WEBRTC_LOG_TAG, - "SetSendCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d, " - "width=%d, height=%d, frameRate=%d\n", - codec.plName, codec.plType, codec.startBitrate, - codec.maxBitrate, codec.width, codec.height, - codec.maxFramerate); - - return vieData.codec->SetSendCodec(channel, codec); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetSendCodec - * Signature: ()Z - */ -JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs( - JNIEnv *env, - jobject) -{ - if (NULL == vieData.codec) { - return NULL; - } - - jobjectArray ret; - int num = vieData.codec->NumberOfCodecs(); - char info[32]; - - ret = (jobjectArray)env->NewObjectArray( - num, - env->FindClass("java/lang/String"), - env->NewStringUTF("")); - - for (int i = 0; i < num; ++i) { - webrtc::VideoCodec codecToList; - vieData.codec->GetCodec(i, codecToList); - sprintf(info, "%s pltype:%d", codecToList.plName, codecToList.plType); - env->SetObjectArrayElement(ret, i, env->NewStringUTF( info )); - - __android_log_print( - ANDROID_LOG_DEBUG, - WEBRTC_LOG_TAG, - "Codec[%d] %s, pltype=%d, bitRate=%d, 
maxBitRate=%d," - " width=%d, height=%d, frameRate=%d\n", - i, codecToList.plName, codecToList.plType, - codecToList.startBitrate, codecToList.maxBitrate, - codecToList.width, codecToList.height, - codecToList.maxFramerate); - } - - return ret; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: AddRemoteRenderer - * Signature: (ILjava/lang/Object;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer( - JNIEnv *, - jobject, - jint channel, - jobject glSurface) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "AddRemoteRenderer"); - if (vieData.vie) { - return vieData.render->AddRenderer(channel, glSurface, 0, 0, 0, 1, 1); - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: RemoveRemoteRenderer - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "RemoveRemoteRenderer"); - - if (vieData.vie) { - return vieData.render->RemoveRenderer(channel); - } - else { - return -1; - } - return 0; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartRender - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartRender"); - - if (vieData.render) { - return vieData.render->StartRender(channel); - } - else { - return -1; - } -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartCamera - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera( - JNIEnv * env, - jobject, - jint channel, - jint cameraNum) -{ - if (NULL == vieData.vie) - return -1; - - int i = 0; - char deviceName[64]; - char deviceUniqueName[64]; - int re; - do { - re = vieData.capture->GetCaptureDevice(i, deviceName, - sizeof(deviceName), - deviceUniqueName, - sizeof(deviceUniqueName)); - __android_log_print( - ANDROID_LOG_DEBUG, - WEBRTC_LOG_TAG, - "GetCaptureDevice ret %d devicenum %d deviceUniqueName %s", - re, i, deviceUniqueName); - i++; - } while (re == 0); - - int ret; - int cameraId; - vieData.capture->GetCaptureDevice(cameraNum, deviceName, - sizeof(deviceName), deviceUniqueName, - sizeof(deviceUniqueName)); - vieData.capture->AllocateCaptureDevice(deviceUniqueName, - sizeof(deviceUniqueName), cameraId); - - if (cameraId >= 0) { //Connect the - ret = vieData.capture->ConnectCaptureDevice(cameraId, channel); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "ConnectCaptureDevice ret %d ", ret); - - ret = vieData.capture->StartCapture(cameraId); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "StartCapture ret %d ", ret); - } - - return cameraId; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopCamera - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera( - JNIEnv *, - jobject, - jint cameraId) -{ - if (NULL == vieData.capture) - return -1; - - int ret = vieData.capture->StopCapture(cameraId); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "StopCapture ret %d ", ret); - ret = vieData.capture->ReleaseCaptureDevice(cameraId); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "ReleaseCaptureDevice ret %d ", ret); - - return ret; -} - -/* - * Class: 
org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: GetCameraOrientation - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation( - JNIEnv *, - jobject, - jint cameraNum) -{ - char deviceName[64]; - char deviceUniqueName[64]; - int ret; - - ret = vieData.capture->GetCaptureDevice(cameraNum, deviceName, - sizeof(deviceName), - deviceUniqueName, - sizeof(deviceUniqueName)); - if (ret != 0) { - return -1; - } - - RotateCapturedFrame orientation; - ret = vieData.capture->GetOrientation(deviceUniqueName, orientation); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "GetOrientation ret %d orientation %d", ret, - orientation); - - return (jint) orientation; - -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetRotation - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation( - JNIEnv *, - jobject, - jint captureId, - jint degrees) -{ - - if (NULL == vieData.capture) - return -1; - RotateCapturedFrame rotation = RotateCapturedFrame_0; - if (degrees == 90) - rotation = RotateCapturedFrame_90; - else if (degrees == 180) - rotation = RotateCapturedFrame_180; - else if (degrees == 270) - rotation = RotateCapturedFrame_270; - - int ret = vieData.capture->SetRotateCapturedFrames(captureId, rotation); - return ret; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetExternalMediaCodecDecoderRenderer - * Signature: (ILjava/lang/Object;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer( - JNIEnv *env, - jobject, - jint channel, - jobject glSurface) -{ - __android_log_write( - ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetExternalMediaCodecDecoder"); - - jclass cls = env->FindClass("org/webrtc/videoengine/ViEMediaCodecDecoder"); - - AndroidMediaCodecDecoder* mediaCodecDecoder = - new AndroidMediaCodecDecoder(webrtcGlobalVM, glSurface, cls); - - // TODO(dwkang): Check the ownership of decoder object and release it - // if needed. 
- return vieData.externalCodec->RegisterExternalReceiveCodec( - channel, VCM_VP8_PAYLOAD_TYPE, mediaCodecDecoder, true); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: EnableNACK - * Signature: (IZ)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK( - JNIEnv *, - jobject, - jint channel, - jboolean enable) -{ - if (NULL == vieData.rtp) - return -1; - - int ret = vieData.rtp->SetNACKStatus(channel, enable); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "EnableNACK(%d) ret:%d", enable, ret); - return ret; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: EnablePLI - * Signature: (IZ)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI( - JNIEnv *, - jobject, - jint channel, - jboolean enable) -{ - if (NULL == vieData.rtp) - return -1; - - if (enable) - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "EnablePLI enable"); - else - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "EnablePLI disable"); - - int ret = vieData.rtp->SetKeyFrameRequestMethod(channel, - kViEKeyFrameRequestPliRtcp); - return ret; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: SetCallback - * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback( - JNIEnv * env, - jobject, - jint channel, - jobject callback) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetCallback"); - - if (NULL == vieData.codec) - return -1; - if (vieData.callback == NULL) { - vieData.callback = new VideoCallbackAndroid(vieData, env, callback); - } - else if (vieData.codec) { - vieData.codec->DeregisterDecoderObserver(channel); // Wrong channel? 
- vieData.codec->DeregisterEncoderObserver(channel); - } - - vieData.codec->RegisterDecoderObserver(channel, *vieData.callback); - vieData.codec->RegisterEncoderObserver(channel, *vieData.callback); - - return 0; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StartIncomingRTPDump - * Signature: (ILjava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump( - JNIEnv* env, - jobject, - jint channel, - jstring filename) { - if (NULL == vieData.rtp) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "video RTP_RTCP interface is null"); - return -1; - } - const char* file = env->GetStringUTFChars(filename, NULL); - if (!file) { - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Video StartRTPDump file name error"); - return -1; - } - if (vieData.rtp->StartRTPDump(channel, file, kRtpIncoming) != 0) { - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Video StartRTPDump error"); - return -1; - } - return 0; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: StopIncomingRTPDump - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump( - JNIEnv *, - jobject, - jint channel) { - if (NULL == vieData.rtp) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "video RTP_RTCP interface is null"); - return -1; - } - if (vieData.rtp->StopRTPDump(channel, kRtpIncoming) != 0) { - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Video StopRTPDump error"); - return -1; - } - return 0; -} - -// -// VoiceEngine API wrapper functions -// - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Create - * Signature: (Landroid/content/Context)Z - */ -JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create( - JNIEnv *env, - jobject context, - jobject ctx) { - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create VoiceEngine"); - - VoiceEngine::SetAndroidObjects(webrtcGlobalVM, env, ctx); - - // Check if already created - if (voeData.ve) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "VoE already created"); - return false; - } - - // Create - voeData.ve = VoiceEngine::Create(); - if (!voeData.ve) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Create VoE failed"); - return false; - } - - // Get sub-APIs - if (!VE_GetSubApis()) { - // If not OK, release all sub-APIs and delete VoE - VE_ReleaseSubApis(); - if (!VoiceEngine::Delete(voeData.ve)) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Delete VoE failed"); - } - return false; - } - - return true; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Delete - * Signature: ()Z - */ -JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete( - JNIEnv *, - jobject) -{ - // Check if exists - if (!voeData.ve) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "VoE does not exist"); - return false; - } - - // Release sub-APIs - VE_ReleaseSubApis(); - - // Delete - if (!VoiceEngine::Delete(voeData.ve)) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Delete VoE failed"); - return false; - } - - voeData.ve = NULL; - - // Clear instance independent Java objects - VoiceEngine::SetAndroidObjects(NULL, NULL, NULL); - - return true; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Init - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL 
Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init( - JNIEnv *, - jobject, - jboolean enableTrace) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "VE_Init"); - - VALIDATE_BASE_POINTER; - - return voeData.base->Init(); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_Terminate - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate( - JNIEnv *, - jobject) -{ - VALIDATE_BASE_POINTER; - - jint retVal = voeData.base->Terminate(); - return retVal; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_CreateChannel - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel( - JNIEnv *, - jobject) -{ - VALIDATE_BASE_POINTER; - - webrtc::CodecInst voiceCodec; - int numOfVeCodecs = voeData.codec->NumOfCodecs(); - - //enum all the supported codec - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "Supported Voice Codec:\n"); - for (int i = 0; i < numOfVeCodecs; ++i) { - if (voeData.codec->GetCodec(i, voiceCodec) != -1) { - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "num: %d name: %s\n", i, voiceCodec.plname); - } - } - - jint channel = voeData.base->CreateChannel(); - voeData.transport.reset(new test::VoiceChannelTransport(voeData.netw, - channel)); - return channel; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_DeleteChannel - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_BASE_POINTER; - voeData.transport.reset(NULL); - return voeData.base->DeleteChannel(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: ViE_DeleteChannel - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_ViE_1DeleteChannel( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_BASE_POINTER; - vieData.transport.reset(NULL); - return vieData.base->DeleteChannel(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetLocalReceiver - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver( - JNIEnv *, - jobject, - jint channel, - jint port) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver"); - VALIDATE_BASE_POINTER; - if (voeData.transport.get()) { - return voeData.transport->SetLocalReceiver(port); - } - return -1; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetSendDestination - * Signature: (IILjava/lang/String;)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination( - JNIEnv *env, - jobject, - jint channel, - jint port, - jstring ipaddr) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendDestination"); - VALIDATE_BASE_POINTER; - - const char* ipaddrNative = env->GetStringUTFChars(ipaddr, NULL); - if (!ipaddrNative) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Could not get UTF string"); - return -1; - } - if (voeData.transport.get()) { - jint retVal = voeData.transport->SetSendDestination(ipaddrNative, port); - env->ReleaseStringUTFChars(ipaddr, ipaddrNative); - return retVal; - } - env->ReleaseStringUTFChars(ipaddr, ipaddrNative); - return -1; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartListen - * 
Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartListen"); - VALIDATE_BASE_POINTER; - return voeData.base->StartReceive(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartPlayout - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartPlayout"); - VALIDATE_BASE_POINTER; - return voeData.base->StartPlayout(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend( - JNIEnv *, - jobject, - jint channel) -{ - __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend"); - VALIDATE_BASE_POINTER; - return voeData.base->StartSend(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopListen - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_BASE_POINTER; - return voeData.base->StartReceive(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopPlayout - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_BASE_POINTER; - return voeData.base->StopPlayout(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopSend - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_BASE_POINTER; - return voeData.base->StopSend(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetSpeakerVolume - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume( - JNIEnv *, - jobject, - jint level) -{ - VALIDATE_VOLUME_POINTER; - - if (voeData.volume->SetSpeakerVolume(level) != 0) { - return -1; - } - return 0; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetLoudspeakerStatus - * Signature: (Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus( - JNIEnv *, - jobject, - jboolean enable) { - VALIDATE_HARDWARE_POINTER; - if (voeData.hardware->SetLoudspeakerStatus(enable) != 0) { - __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "SetLoudspeakerStatus(%d) failed", enable); - return -1; - } - return 0; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartPlayingFileLocally - * Signature: (ILjava/lang/String;Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally( - JNIEnv * env, - jobject, - jint channel, - jstring fileName, - jboolean loop) -{ - VALIDATE_FILE_POINTER; - - const char* fileNameNative = env->GetStringUTFChars(fileName, NULL); - if (!fileNameNative) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Could not get UTF string"); - return -1; - } - - jint retVal = voeData.file->StartPlayingFileLocally(channel, - fileNameNative, - loop); - - 
env->ReleaseStringUTFChars(fileName, fileNameNative); - - return retVal; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopPlayingFileLocally - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_FILE_POINTER; - return voeData.file->StopPlayingFileLocally(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StartPlayingFileAsMicrophone - * Signature: (ILjava/lang/String;Z)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone( - JNIEnv *env, - jobject, - jint channel, - jstring fileName, - jboolean loop) -{ - VALIDATE_FILE_POINTER; - - const char* fileNameNative = env->GetStringUTFChars(fileName, NULL); - if (!fileNameNative) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Could not get UTF string"); - return -1; - } - - jint retVal = voeData.file->StartPlayingFileAsMicrophone(channel, - fileNameNative, - loop); - - env->ReleaseStringUTFChars(fileName, fileNameNative); - - return retVal; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_StopPlayingFileAsMicrophone - * Signature: (I)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone( - JNIEnv *, - jobject, - jint channel) -{ - VALIDATE_FILE_POINTER; - return voeData.file->StopPlayingFileAsMicrophone(channel); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_NumOfCodecs - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs( - JNIEnv *, - jobject) -{ - VALIDATE_CODEC_POINTER; - return voeData.codec->NumOfCodecs(); -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_NumOfCodecs - * Signature: ()I - */ -JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs( - JNIEnv *env, - jobject) -{ - if (!voeData.codec) { - __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, - "Codec pointer doesn't exist"); - return NULL; - } - - jobjectArray ret; - int i; - int num = voeData.codec->NumOfCodecs(); - char info[256]; - - ret = (jobjectArray)env->NewObjectArray( - num, - env->FindClass("java/lang/String"), - env->NewStringUTF("")); - - for(i = 0; i < num; i++) { - webrtc::CodecInst codecToList; - voeData.codec->GetCodec(i, codecToList); - int written = snprintf(info, sizeof(info), - "%s type:%d freq:%d pac:%d ch:%d rate:%d", - codecToList.plname, codecToList.pltype, - codecToList.plfreq, codecToList.pacsize, - codecToList.channels, codecToList.rate); - if (written < 0 || written >= static_cast<int>(sizeof(info))) { - assert(false); - } - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "VoiceEgnine Codec[%d] %s", i, info); - env->SetObjectArrayElement(ret, i, env->NewStringUTF( info )); - } - - return ret; -} - -/* - * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI - * Method: VoE_SetSendCodec - * Signature: (II)I - */ -JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec( - JNIEnv *, - jobject, - jint channel, - jint index) -{ - VALIDATE_CODEC_POINTER; - - webrtc::CodecInst codec; - - for (int i = 0; i < voeData.codec->NumOfCodecs(); ++i) { - webrtc::CodecInst codecToList; - voeData.codec->GetCodec(i, codecToList); - __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, - "VE Codec list %s,
pltype=%d\n",
-                      codecToList.plname, codecToList.pltype);
-  }
-
-  if (voeData.codec->GetCodec(index, codec) != 0) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Failed to get codec");
-    return -1;
-  }
-  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendCodec %s\n",
-                      codec.plname);
-
-  return voeData.codec->SetSendCodec(channel, codec);
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_SetECStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus(
-    JNIEnv *,
-    jobject,
-    jboolean enable) {
-  VALIDATE_APM_POINTER;
-  if (voeData.apm->SetEcStatus(enable, kEcAecm) < 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Failed SetECStatus(%d,%d)", enable, kEcAecm);
-    return -1;
-  }
-  if (voeData.apm->SetAecmMode(kAecmSpeakerphone, false) != 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Failed SetAecmMode(%d,%d)", kAecmSpeakerphone, 0);
-    return -1;
-  }
-  return 0;
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_SetAGCStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus(
-    JNIEnv *,
-    jobject,
-    jboolean enable) {
-  VALIDATE_APM_POINTER;
-  if (voeData.apm->SetAgcStatus(enable, kAgcFixedDigital) < 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Failed SetAgcStatus(%d,%d)", enable, kAgcFixedDigital);
-    return -1;
-  }
-  webrtc::AgcConfig config;
-  // The following settings are by default, explicitly set here.
-  config.targetLeveldBOv = 3;
-  config.digitalCompressionGaindB = 9;
-  config.limiterEnable = true;
-  if (voeData.apm->SetAgcConfig(config) != 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Failed SetAgcConfig(%d,%d,%d)",
-                        config.targetLeveldBOv,
-                        config.digitalCompressionGaindB,
-                        config.limiterEnable);
-    return -1;
-  }
-  return 0;
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_SetNSStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus(
-    JNIEnv *,
-    jobject,
-    jboolean enable) {
-  VALIDATE_APM_POINTER;
-  if (voeData.apm->SetNsStatus(enable, kNsModerateSuppression) < 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Failed SetNsStatus(%d,%d)",
-                        enable, kNsModerateSuppression);
-    return -1;
-  }
-  return 0;
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_StartDebugRecording
- * Signature: (Ljava/lang/String)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording(
-    JNIEnv* env,
-    jobject,
-    jstring filename) {
-  VALIDATE_APM_POINTER;
-
-  const char* file = env->GetStringUTFChars(filename, NULL);
-  if (!file) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Voice StartDebugRecording file error");
-    return -1;
-  }
-  if (voeData.apm->StartDebugRecording(file) != 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Voice StartDebugRecording error");
-    return -1;
-  }
-  return 0;
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_StopDebugRecording
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording(
-    JNIEnv *,
-    jobject) {
-  VALIDATE_APM_POINTER;
-  if (voeData.apm->StopDebugRecording() < 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Voice StopDebugRecording error");
-    return -1;
-  }
-  return 0;
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_StartIncomingRTPDump
- * Signature: (ILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump(
-    JNIEnv* env,
-    jobject,
-    jint channel,
-    jstring filename) {
-  VALIDATE_RTP_POINTER;
-  const char* file = env->GetStringUTFChars(filename, NULL);
-  if (!file) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Voice StartRTPDump file error");
-    return -1;
-  }
-  if (voeData.rtp->StartRTPDump(channel, file, kRtpIncoming) != 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Voice StartRTPDump error");
-    return -1;
-  }
-  return 0;
-}
-
-/*
- * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method:    VoE_StopIncomingRTPDump
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump(
-    JNIEnv *,
-    jobject,
-    jint channel) {
-  VALIDATE_RTP_POINTER;
-  if (voeData.rtp->StopRTPDump(channel) < 0) {
-    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Voice StopRTPDump error");
-    return -1;
-  }
-  return 0;
-}
-
-//
-// local function
-//
-
-// Get all sub-APIs
-bool VE_GetSubApis() {
-  bool getOK = true;
-
-  // Base
-  voeData.base = VoEBase::GetInterface(voeData.ve);
-  if (!voeData.base) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get base sub-API failed");
-    getOK = false;
-  }
-
-  // Codec
-  voeData.codec = VoECodec::GetInterface(voeData.ve);
-  if (!voeData.codec) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get codec sub-API failed");
-    getOK = false;
-  }
-
-  // File
-  voeData.file = VoEFile::GetInterface(voeData.ve);
-  if (!voeData.file) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get file sub-API failed");
-    getOK = false;
-  }
-
-  // Network
-  voeData.netw = VoENetwork::GetInterface(voeData.ve);
-  if (!voeData.netw) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get network sub-API failed");
-    getOK = false;
-  }
-
-  // audioprocessing
-  voeData.apm = VoEAudioProcessing::GetInterface(voeData.ve);
-  if (!voeData.apm) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get VoEAudioProcessing sub-API failed");
-    getOK = false;
-  }
-
-  // Volume
-  voeData.volume = VoEVolumeControl::GetInterface(voeData.ve);
-  if (!voeData.volume) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get volume sub-API failed");
-    getOK = false;
-  }
-
-  // Hardware
-  voeData.hardware = VoEHardware::GetInterface(voeData.ve);
-  if (!voeData.hardware) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get hardware sub-API failed");
-    getOK = false;
-  }
-
-  // RTP
-  voeData.rtp = VoERTP_RTCP::GetInterface(voeData.ve);
-  if (!voeData.rtp) {
-    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                        "Get rtp sub-API failed");
-    getOK = false;
-  }
-
-  return getOK;
-}
-
-// Release all sub-APIs
-bool VE_ReleaseSubApis() {
-  bool releaseOK = true;
-
-  // Base
-  if (voeData.base) {
-    if (0 != voeData.base->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release base sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.base = NULL;
-    }
-  }
-
-  // Codec
-  if (voeData.codec) {
-    if (0 != voeData.codec->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release codec sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.codec = NULL;
-    }
-  }
-
-  // File
-  if (voeData.file) {
-    if (0 != voeData.file->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release file sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.file = NULL;
-    }
-  }
-
-  // Network
-  if (voeData.netw) {
-    if (0 != voeData.netw->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release network sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.netw = NULL;
-    }
-  }
-
-  // apm
-  if (voeData.apm) {
-    if (0 != voeData.apm->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release apm sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.apm = NULL;
-    }
-  }
-
-  // Volume
-  if (voeData.volume) {
-    if (0 != voeData.volume->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release volume sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.volume = NULL;
-    }
-  }
-
-  // Hardware
-  if (voeData.hardware) {
-    if (0 != voeData.hardware->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release hardware sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.hardware = NULL;
-    }
-  }
-
-  if (voeData.rtp) {
-    if (0 != voeData.rtp->Release()) {
-      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
-                          "Release rtp sub-API failed");
-      releaseOK = false;
-    }
-    else {
-      voeData.rtp = NULL;
-    }
-  }
-
-  return releaseOK;
-}
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/project.properties b/media/webrtc/trunk/webrtc/video_engine/test/android/project.properties
deleted file mode 100644
index ddd0fc4187ed..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/project.properties
+++ /dev/null
@@ -1,13 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system use,
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-
-# Indicates whether an apk should be generated for each density.
-split.density=false
-# Project target.
-target=android-10
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/res/drawable/logo.png b/media/webrtc/trunk/webrtc/video_engine/test/android/res/drawable/logo.png
deleted file mode 100644
index a07c69fa5a0f..000000000000
Binary files a/media/webrtc/trunk/webrtc/video_engine/test/android/res/drawable/logo.png and /dev/null differ
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/res/layout/aconfig.xml b/media/webrtc/trunk/webrtc/video_engine/test/android/res/layout/aconfig.xml
deleted file mode 100644
index 5f995c6fb9b3..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/res/layout/aconfig.xml
+++ /dev/null
@@ -1,87 +0,0 @@
[87 deleted lines of XML layout markup; element content not recoverable from this copy]
diff --git a/media/webrtc/trunk/webrtc/video_engine/test/android/res/layout/both.xml b/media/webrtc/trunk/webrtc/video_engine/test/android/res/layout/both.xml
deleted file mode 100644
index d29d9063fc64..000000000000
--- a/media/webrtc/trunk/webrtc/video_engine/test/android/res/layout/both.xml
+++ /dev/null
@@ -1,43 +0,0 @@
[43 deleted lines of XML layout markup; element content not recoverable from this copy]