Bug 1766646 - (faux-vendor) add libwebrtc/examples

(skip-generation)
Michael Froman 2022-01-28 15:10:17 -06:00 committed by Connor Sheehan
Parent d814168455
Commit bffce67d59
267 changed files: 30666 additions and 0 deletions

903
third_party/libwebrtc/examples/BUILD.gn vendored Normal file

@@ -0,0 +1,903 @@
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../webrtc.gni")
if (is_android) {
import("//build/config/android/config.gni")
import("//build/config/android/rules.gni")
} else if (is_mac) {
import("//build/config/mac/rules.gni")
} else if (is_ios) {
import("//build/config/ios/rules.gni")
}
group("examples") {
# This target shall build all targets in examples.
testonly = true
deps = []
if (is_android) {
deps += [
":AppRTCMobile",
":AppRTCMobile_test_apk",
":libwebrtc_unity",
"androidvoip",
]
# TODO(sakal): We include some code from the tests. Remove this dependency
# and remove this if-clause.
if (rtc_include_tests) {
deps += [ "androidnativeapi" ]
}
}
if (!build_with_chromium) {
deps += [ ":stun_prober" ]
}
if (is_ios || (is_mac && target_cpu != "x86")) {
deps += [ ":AppRTCMobile" ]
}
if (is_linux || is_chromeos || is_win) {
deps += [
":peerconnection_server",
":stunserver",
":turnserver",
]
if (current_os != "winuwp") {
deps += [ ":peerconnection_client" ]
}
}
if (is_android || is_win) {
deps += [ ":webrtc_unity_plugin" ]
}
}
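# As a usage sketch (out/Default is only a conventional build-directory name):
# once `gn gen out/Default` has run, any example target above can be built on
# its own, e.g.
#   ninja -C out/Default peerconnection_client peerconnection_server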
rtc_library("read_auth_file") {
testonly = true
sources = [
"turnserver/read_auth_file.cc",
"turnserver/read_auth_file.h",
]
deps = [ "../rtc_base" ]
}
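# read_auth_file is a separate target so that both :turnserver and the
# examples_unittests target below can share the auth-file parsing code.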
if (rtc_include_tests) {
rtc_test("examples_unittests") {
testonly = true
sources = [ "turnserver/read_auth_file_unittest.cc" ]
deps = [
":read_auth_file",
"../test:test_main",
"//test:test_support",
"//testing/gtest",
]
}
}
if (is_android) {
rtc_android_apk("AppRTCMobile") {
testonly = true
apk_name = "AppRTCMobile"
android_manifest = "androidapp/AndroidManifest.xml"
min_sdk_version = 21
target_sdk_version = 29
deps = [
":AppRTCMobile_javalib",
":AppRTCMobile_resources",
"../rtc_base:base_java",
]
shared_libraries = [ "../sdk/android:libjingle_peerconnection_so" ]
}
rtc_android_library("AppRTCMobile_javalib") {
testonly = true
android_manifest = "androidapp/AndroidManifest.xml"
sources = [
"androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java",
"androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java",
"androidapp/src/org/appspot/apprtc/AppRTCClient.java",
"androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java",
"androidapp/src/org/appspot/apprtc/CallActivity.java",
"androidapp/src/org/appspot/apprtc/CallFragment.java",
"androidapp/src/org/appspot/apprtc/CaptureQualityController.java",
"androidapp/src/org/appspot/apprtc/ConnectActivity.java",
"androidapp/src/org/appspot/apprtc/CpuMonitor.java",
"androidapp/src/org/appspot/apprtc/DirectRTCClient.java",
"androidapp/src/org/appspot/apprtc/HudFragment.java",
"androidapp/src/org/appspot/apprtc/PeerConnectionClient.java",
"androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java",
"androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java",
"androidapp/src/org/appspot/apprtc/RtcEventLog.java",
"androidapp/src/org/appspot/apprtc/SettingsActivity.java",
"androidapp/src/org/appspot/apprtc/SettingsFragment.java",
"androidapp/src/org/appspot/apprtc/TCPChannelClient.java",
"androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java",
"androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java",
"androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java",
"androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java",
"androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java",
]
resources_package = "org.appspot.apprtc"
deps = [
":AppRTCMobile_resources",
"../rtc_base:base_java",
"../sdk/android:audio_api_java",
"../sdk/android:base_java",
"../sdk/android:camera_java",
"../sdk/android:default_video_codec_factory_java",
"../sdk/android:filevideo_java",
"../sdk/android:hwcodecs_java",
"../sdk/android:java_audio_device_module_java",
"../sdk/android:libjingle_peerconnection_java",
"../sdk/android:libjingle_peerconnection_metrics_default_java",
"../sdk/android:peerconnection_java",
"../sdk/android:screencapturer_java",
"../sdk/android:surfaceviewrenderer_java",
"../sdk/android:swcodecs_java",
"../sdk/android:video_api_java",
"../sdk/android:video_java",
"androidapp/third_party/autobanh:autobanh_java",
"//third_party/android_deps:com_android_support_support_annotations_java",
]
}
android_resources("AppRTCMobile_resources") {
testonly = true
sources = [
"androidapp/res/drawable-hdpi/disconnect.png",
"androidapp/res/drawable-hdpi/ic_action_full_screen.png",
"androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png",
"androidapp/res/drawable-hdpi/ic_launcher.png",
"androidapp/res/drawable-hdpi/ic_loopback_call.png",
"androidapp/res/drawable-ldpi/disconnect.png",
"androidapp/res/drawable-ldpi/ic_action_full_screen.png",
"androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png",
"androidapp/res/drawable-ldpi/ic_launcher.png",
"androidapp/res/drawable-ldpi/ic_loopback_call.png",
"androidapp/res/drawable-mdpi/disconnect.png",
"androidapp/res/drawable-mdpi/ic_action_full_screen.png",
"androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png",
"androidapp/res/drawable-mdpi/ic_launcher.png",
"androidapp/res/drawable-mdpi/ic_loopback_call.png",
"androidapp/res/drawable-xhdpi/disconnect.png",
"androidapp/res/drawable-xhdpi/ic_action_full_screen.png",
"androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png",
"androidapp/res/drawable-xhdpi/ic_launcher.png",
"androidapp/res/drawable-xhdpi/ic_loopback_call.png",
"androidapp/res/layout/activity_call.xml",
"androidapp/res/layout/activity_connect.xml",
"androidapp/res/layout/fragment_call.xml",
"androidapp/res/layout/fragment_hud.xml",
"androidapp/res/menu/connect_menu.xml",
"androidapp/res/values-v17/styles.xml",
"androidapp/res/values-v21/styles.xml",
"androidapp/res/values/arrays.xml",
"androidapp/res/values/strings.xml",
"androidapp/res/xml/preferences.xml",
]
create_srcjar = false
# Needed for Bazel converter.
custom_package = "org.appspot.apprtc"
resource_dirs = [ "androidapp/res" ]
assert(resource_dirs != []) # Mark as used.
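# (The assert merely references resource_dirs so gn's unused-variable check
# passes; the condition itself is always true here.)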
}
rtc_instrumentation_test_apk("AppRTCMobile_test_apk") {
apk_name = "AppRTCMobileTest"
android_manifest = "androidtests/AndroidManifest.xml"
min_sdk_version = 21
target_sdk_version = 21
sources = [
"androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java",
]
apk_under_test = ":AppRTCMobile"
deps = [
":AppRTCMobile_javalib",
"../sdk/android:base_java",
"../sdk/android:camera_java",
"../sdk/android:libjingle_peerconnection_java",
"../sdk/android:peerconnection_java",
"../sdk/android:video_api_java",
"../sdk/android:video_java",
"//third_party/android_support_test_runner:runner_java",
"//third_party/junit",
]
}
}
if (is_ios || (is_mac && target_cpu != "x86")) {
config("apprtc_common_config") {
include_dirs = [ "objc/AppRTCMobile/common" ]
}
rtc_library("apprtc_common") {
testonly = true
sources = [
"objc/AppRTCMobile/common/ARDUtilities.h",
"objc/AppRTCMobile/common/ARDUtilities.m",
]
public_configs = [ ":apprtc_common_config" ]
if (is_ios) {
# iOS must use WebRTC.framework which is dynamically linked.
deps = [ "../sdk:framework_objc+link" ]
} else {
deps = [ "../sdk:mac_framework_objc+link" ]
}
}
config("apprtc_signaling_config") {
include_dirs = [ "objc/AppRTCMobile" ]
cflags_objc = [
# TODO(bugs.webrtc.org/10837): Remove this when usage of
# archivedDataWithRootObject will be removed.
"-Wno-unguarded-availability",
]
}
rtc_library("apprtc_signaling") {
testonly = true
sources = [
"objc/AppRTCMobile/ARDAppClient+Internal.h",
"objc/AppRTCMobile/ARDAppClient.h",
"objc/AppRTCMobile/ARDAppClient.m",
"objc/AppRTCMobile/ARDAppEngineClient.h",
"objc/AppRTCMobile/ARDAppEngineClient.m",
"objc/AppRTCMobile/ARDBitrateTracker.h",
"objc/AppRTCMobile/ARDBitrateTracker.m",
"objc/AppRTCMobile/ARDCaptureController.h",
"objc/AppRTCMobile/ARDCaptureController.m",
"objc/AppRTCMobile/ARDExternalSampleCapturer.h",
"objc/AppRTCMobile/ARDExternalSampleCapturer.m",
"objc/AppRTCMobile/ARDJoinResponse+Internal.h",
"objc/AppRTCMobile/ARDJoinResponse.h",
"objc/AppRTCMobile/ARDJoinResponse.m",
"objc/AppRTCMobile/ARDMessageResponse+Internal.h",
"objc/AppRTCMobile/ARDMessageResponse.h",
"objc/AppRTCMobile/ARDMessageResponse.m",
"objc/AppRTCMobile/ARDRoomServerClient.h",
"objc/AppRTCMobile/ARDSettingsModel+Private.h",
"objc/AppRTCMobile/ARDSettingsModel.h",
"objc/AppRTCMobile/ARDSettingsModel.m",
"objc/AppRTCMobile/ARDSettingsStore.h",
"objc/AppRTCMobile/ARDSettingsStore.m",
"objc/AppRTCMobile/ARDSignalingChannel.h",
"objc/AppRTCMobile/ARDSignalingMessage.h",
"objc/AppRTCMobile/ARDSignalingMessage.m",
"objc/AppRTCMobile/ARDStatsBuilder.h",
"objc/AppRTCMobile/ARDStatsBuilder.m",
"objc/AppRTCMobile/ARDTURNClient+Internal.h",
"objc/AppRTCMobile/ARDTURNClient.h",
"objc/AppRTCMobile/ARDTURNClient.m",
"objc/AppRTCMobile/ARDWebSocketChannel.h",
"objc/AppRTCMobile/ARDWebSocketChannel.m",
"objc/AppRTCMobile/RTCIceCandidate+JSON.h",
"objc/AppRTCMobile/RTCIceCandidate+JSON.m",
"objc/AppRTCMobile/RTCIceServer+JSON.h",
"objc/AppRTCMobile/RTCIceServer+JSON.m",
"objc/AppRTCMobile/RTCSessionDescription+JSON.h",
"objc/AppRTCMobile/RTCSessionDescription+JSON.m",
]
public_configs = [ ":apprtc_signaling_config" ]
deps = [
":apprtc_common",
":socketrocket",
]
if (is_ios) {
# iOS must use WebRTC.framework which is dynamically linked.
deps += [
"../sdk:framework_objc+link",
"../sdk:ios_framework_bundle",
]
} else {
deps += [ "../sdk:mac_framework_objc+link" ]
}
frameworks = [
"CoreMedia.framework",
"QuartzCore.framework",
]
}
if (is_ios) {
rtc_library("AppRTCMobile_lib") {
# iOS must use WebRTC.framework which is dynamically linked.
testonly = true
sources = [
"objc/AppRTCMobile/ios/ARDAppDelegate.h",
"objc/AppRTCMobile/ios/ARDAppDelegate.m",
"objc/AppRTCMobile/ios/ARDFileCaptureController.h",
"objc/AppRTCMobile/ios/ARDFileCaptureController.m",
"objc/AppRTCMobile/ios/ARDMainView.h",
"objc/AppRTCMobile/ios/ARDMainView.m",
"objc/AppRTCMobile/ios/ARDMainViewController.h",
"objc/AppRTCMobile/ios/ARDMainViewController.m",
"objc/AppRTCMobile/ios/ARDSettingsViewController.h",
"objc/AppRTCMobile/ios/ARDSettingsViewController.m",
"objc/AppRTCMobile/ios/ARDStatsView.h",
"objc/AppRTCMobile/ios/ARDStatsView.m",
"objc/AppRTCMobile/ios/ARDVideoCallView.h",
"objc/AppRTCMobile/ios/ARDVideoCallView.m",
"objc/AppRTCMobile/ios/ARDVideoCallViewController.h",
"objc/AppRTCMobile/ios/ARDVideoCallViewController.m",
"objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h",
"objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m",
"objc/AppRTCMobile/ios/UIImage+ARDUtilities.h",
"objc/AppRTCMobile/ios/UIImage+ARDUtilities.m",
]
configs += [ "..:common_objc" ]
deps = [
":apprtc_common",
":apprtc_signaling",
"../sdk:framework_objc+link",
"../sdk:ios_framework_bundle",
]
frameworks = [ "AVFoundation.framework" ]
}
ios_app_bundle("AppRTCMobile") {
testonly = true
sources = [ "objc/AppRTCMobile/ios/main.m" ]
info_plist = "objc/AppRTCMobile/ios/Info.plist"
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
deps = [
":AppRTCMobile_ios_bundle_data",
":AppRTCMobile_lib",
"../sdk:framework_objc",
"../sdk:ios_framework_bundle",
]
if (rtc_apprtcmobile_broadcast_extension) {
deps += [
":AppRTCMobileBroadcastSetupUI_extension_bundle",
":AppRTCMobileBroadcastUpload_extension_bundle",
]
}
if (target_cpu == "x86") {
deps += [ "//testing/iossim" ]
}
}
if (rtc_apprtcmobile_broadcast_extension) {
bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") {
testonly = true
public_deps = [ # no-presubmit-check TODO(webrtc:8603)
":AppRTCMobileBroadcastUpload", # prevent code format
]
sources = [ "$root_out_dir/AppRTCMobileBroadcastUpload.appex" ]
outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
}
bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") {
testonly = true
public_deps = [ # no-presubmit-check TODO(webrtc:8603)
":AppRTCMobileBroadcastSetupUI", # prevent code format
]
sources = [ "$root_out_dir/AppRTCMobileBroadcastSetupUI.appex" ]
outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
}
rtc_library("AppRTCMobileBroadcastUpload_lib") {
testonly = true
sources = [
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h",
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m",
]
deps = [
":apprtc_signaling",
"../sdk:framework_objc+link",
"../sdk:ios_framework_bundle",
]
frameworks = [ "ReplayKit.framework" ]
}
ios_appex_bundle("AppRTCMobileBroadcastUpload") {
testonly = true
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist"
deps = [
":AppRTCMobileBroadcastUpload_lib",
"../sdk:framework_objc",
]
}
ios_appex_bundle("AppRTCMobileBroadcastSetupUI") {
sources = [
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h",
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m",
]
info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist"
frameworks = [ "ReplayKit.framework" ]
deps = [ ":AppRTCMobile_ios_bundle_data" ]
}
}
bundle_data("AppRTCMobile_ios_bundle_data") {
sources = [
"objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf",
# Sample video taken from https://media.xiph.org/video/derf/
"objc/AppRTCMobile/ios/resources/foreman.mp4",
"objc/AppRTCMobile/ios/resources/iPhone5@2x.png",
"objc/AppRTCMobile/ios/resources/iPhone6@2x.png",
"objc/AppRTCMobile/ios/resources/iPhone6p@3x.png",
"objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png",
"objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png",
"objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png",
"objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png",
"objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png",
"objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png",
"objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png",
"objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png",
"objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png",
"objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png",
"objc/AppRTCMobile/ios/resources/mozart.mp3",
"objc/Icon-120.png",
"objc/Icon-180.png",
"objc/Icon.png",
]
outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
}
rtc_library("ObjCNativeAPIDemo_lib") {
testonly = true
sources = [
"objcnativeapi/objc/NADAppDelegate.h",
"objcnativeapi/objc/NADAppDelegate.m",
"objcnativeapi/objc/NADViewController.h",
"objcnativeapi/objc/NADViewController.mm",
"objcnativeapi/objc/objc_call_client.h",
"objcnativeapi/objc/objc_call_client.mm",
]
deps = [
"../api:libjingle_peerconnection_api",
"../api:scoped_refptr",
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/audio_codecs:builtin_audio_encoder_factory",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
"../media:rtc_audio_video",
"../modules/audio_processing",
"../modules/audio_processing:api",
"../pc:libjingle_peerconnection",
"../rtc_base",
"../rtc_base/synchronization:mutex",
"../sdk:base_objc",
"../sdk:default_codec_factory_objc",
"../sdk:helpers_objc",
"../sdk:native_api",
"../sdk:ui_objc",
"../sdk:videocapture_objc",
"../sdk:videotoolbox_objc",
]
if (current_cpu == "arm64") {
deps += [ "../sdk:metal_objc" ]
}
}
ios_app_bundle("ObjCNativeAPIDemo") {
testonly = true
sources = [ "objcnativeapi/objc/main.m" ]
info_plist = "objcnativeapi/Info.plist"
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
deps = [ ":ObjCNativeAPIDemo_lib" ]
if (target_cpu == "x86") {
deps += [ "//testing/iossim" ]
}
}
}
if (is_mac) {
rtc_library("AppRTCMobile_lib") {
testonly = true
sources = [
"objc/AppRTCMobile/mac/APPRTCAppDelegate.h",
"objc/AppRTCMobile/mac/APPRTCAppDelegate.m",
"objc/AppRTCMobile/mac/APPRTCViewController.h",
"objc/AppRTCMobile/mac/APPRTCViewController.m",
]
configs += [ "..:common_objc" ]
deps = [
":apprtc_common",
":apprtc_signaling",
"../sdk:mac_framework_objc+link",
]
}
mac_app_bundle("AppRTCMobile") {
testonly = true
output_name = "AppRTCMobile"
sources = [ "objc/AppRTCMobile/mac/main.m" ]
public_configs = [ "..:common_inherited_config" ]
info_plist = "objc/AppRTCMobile/mac/Info.plist"
frameworks = [ "AppKit.framework" ]
ldflags = [
"-rpath",
"@executable_path/../Frameworks",
]
deps = [
":AppRTCMobile_lib",
"../sdk:mac_framework_bundle",
"../sdk:mac_framework_objc+link",
]
}
}
config("socketrocket_include_config") {
include_dirs = [ "objc/AppRTCMobile/third_party/SocketRocket" ]
}
config("socketrocket_warning_config") {
# GN orders flags on a target before flags from configs. The default config
# adds these flags, so to cancel them out they need to come from a config and
# cannot be set on the target directly.
cflags = [
"-Wno-deprecated-declarations",
"-Wno-nonnull",
"-Wno-semicolon-before-method-body",
"-Wno-unused-variable",
]
cflags_objc = [
# Enabled for cflags_objc in build/config/compiler/BUILD.gn.
"-Wno-objc-missing-property-synthesis",
]
}
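# To illustrate the ordering rule above with a hypothetical flag (not part of
# this build): a "-Wno-..." placed directly on the :socketrocket target would
# be emitted before the default config's warning flags, and the compiler
# honors whichever flag comes last; placing the suppressions in this config,
# appended after the defaults, puts them last on the command line.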
rtc_library("socketrocket") {
testonly = true
sources = [
"objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h",
"objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m",
]
configs += [ ":socketrocket_warning_config" ]
public_configs = [ ":socketrocket_include_config" ]
libs = [ "icucore" ]
frameworks = [
"CFNetwork.framework",
"Security.framework",
]
}
if (rtc_include_tests) {
# TODO(kthelgason): compile xctests on mac when chromium supports it.
if (is_ios) {
rtc_library("apprtcmobile_test_sources") {
# iOS must use WebRTC.framework which is dynamically linked.
testonly = true
include_dirs = [
"objc/AppRTCMobile",
"objc/AppRTCMobile/ios",
]
sources = [
"objc/AppRTCMobile/tests/ARDAppClient_xctest.mm",
"objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm",
"objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm",
]
deps = [
":AppRTCMobile_lib",
":apprtc_signaling",
"../rtc_base",
"../sdk:framework_objc+link",
"../sdk:ios_framework_bundle",
"//build/config/ios:xctest",
"//third_party/ocmock",
]
}
rtc_test("apprtcmobile_tests") {
is_xctest = true
info_plist = "objc/AppRTCMobile/ios/Info.plist"
sources = [ "objc/AppRTCMobile/tests/main.mm" ]
deps = [
":AppRTCMobile_lib",
":apprtcmobile_test_sources",
"../sdk:framework_objc",
"//test:test_support",
]
ldflags = [ "-all_load" ]
}
}
}
}
if (is_linux || is_chromeos || is_win) {
rtc_executable("peerconnection_client") {
testonly = true
sources = [
"peerconnection/client/conductor.cc",
"peerconnection/client/conductor.h",
"peerconnection/client/defaults.cc",
"peerconnection/client/defaults.h",
"peerconnection/client/peer_connection_client.cc",
"peerconnection/client/peer_connection_client.h",
]
deps = [
"../api:audio_options_api",
"../api:create_peerconnection_factory",
"../api:libjingle_peerconnection_api",
"../api:media_stream_interface",
"../api:scoped_refptr",
"../api/audio:audio_mixer_api",
"../api/audio_codecs:audio_codecs_api",
"../api/video:video_frame_i420",
"../api/video:video_rtp_headers",
"../api/video_codecs:video_codecs_api",
"../media:rtc_media_base",
"../p2p:rtc_p2p",
"../rtc_base:checks",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
"../test:field_trial",
"../test:platform_video_capturer",
"../test:rtp_test_utils",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
if (is_win) {
sources += [
"peerconnection/client/flag_defs.h",
"peerconnection/client/main.cc",
"peerconnection/client/main_wnd.cc",
"peerconnection/client/main_wnd.h",
]
configs += [ "//build/config/win:windowed" ]
deps += [ "../media:rtc_media_base" ]
}
if (is_linux || is_chromeos) {
sources += [
"peerconnection/client/linux/main.cc",
"peerconnection/client/linux/main_wnd.cc",
"peerconnection/client/linux/main_wnd.h",
]
cflags = [ "-Wno-deprecated-declarations" ]
libs = [
"X11",
"Xcomposite",
"Xext",
"Xrender",
]
deps += [ "//build/config/linux/gtk" ]
}
deps += [
"../api:libjingle_peerconnection_api",
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/audio_codecs:builtin_audio_encoder_factory",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../api/video_codecs:builtin_video_decoder_factory",
"../api/video_codecs:builtin_video_encoder_factory",
"../media:rtc_audio_video",
"../modules/audio_device",
"../modules/audio_processing",
"../modules/audio_processing:api",
"../modules/video_capture:video_capture_module",
"../pc:libjingle_peerconnection",
"../pc:peerconnection",
"../rtc_base",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_json",
"../test:video_test_common",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/libyuv",
]
}
rtc_executable("peerconnection_server") {
testonly = true
sources = [
"peerconnection/server/data_socket.cc",
"peerconnection/server/data_socket.h",
"peerconnection/server/main.cc",
"peerconnection/server/peer_channel.cc",
"peerconnection/server/peer_channel.h",
"peerconnection/server/utils.cc",
"peerconnection/server/utils.h",
]
deps = [
"../rtc_base:rtc_base_approved",
"../system_wrappers:field_trial",
"../test:field_trial",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/flags:usage",
]
}
rtc_executable("turnserver") {
testonly = true
sources = [ "turnserver/turnserver_main.cc" ]
deps = [
":read_auth_file",
"../p2p:p2p_server_utils",
"../p2p:rtc_p2p",
"../pc:rtc_pc",
"../rtc_base",
"../rtc_base:rtc_base_approved",
]
}
rtc_executable("stunserver") {
testonly = true
sources = [ "stunserver/stunserver_main.cc" ]
deps = [
"../p2p:p2p_server_utils",
"../p2p:rtc_p2p",
"../pc:rtc_pc",
"../rtc_base",
"../rtc_base:rtc_base_approved",
]
}
}
if (is_win || is_android) {
rtc_shared_library("webrtc_unity_plugin") {
testonly = true
sources = [
"unityplugin/simple_peer_connection.cc",
"unityplugin/simple_peer_connection.h",
"unityplugin/unity_plugin_apis.cc",
"unityplugin/unity_plugin_apis.h",
"unityplugin/video_observer.cc",
"unityplugin/video_observer.h",
]
if (is_android) {
sources += [
"unityplugin/class_reference_holder.cc",
"unityplugin/class_reference_holder.h",
"unityplugin/jni_onload.cc",
]
suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
}
if (is_win) {
configs += [ "//build/config/win:windowed" ]
}
deps = [
"../api:create_peerconnection_factory",
"../api:libjingle_peerconnection_api",
"../api:media_stream_interface",
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/audio_codecs:builtin_audio_encoder_factory",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../media:rtc_audio_video",
"../media:rtc_internal_video_codecs",
"../media:rtc_media",
"../media:rtc_media_base",
"../modules/audio_device",
"../modules/audio_processing",
"../modules/audio_processing:api",
"../modules/video_capture:video_capture_module",
"../pc:libjingle_peerconnection",
"../pc:peerconnection",
"../rtc_base",
"../test:platform_video_capturer",
"../test:video_test_common",
"//third_party/abseil-cpp/absl/memory",
]
if (is_android) {
deps += [
"../modules/utility",
"../sdk/android:libjingle_peerconnection_jni",
]
}
}
}
if (is_android) {
rtc_android_library("webrtc_unity_java") {
sources = [ "unityplugin/java/src/org/webrtc/UnityUtility.java" ]
deps = [
"../rtc_base:base_java",
"../sdk/android:camera_java",
"../sdk/android:libjingle_peerconnection_java",
"../sdk/android:peerconnection_java",
"../sdk/android:video_api_java",
"../sdk/android:video_java",
"//third_party/android_deps:com_android_support_support_annotations_java",
]
}
dist_jar("libwebrtc_unity") {
_target_dir_name = get_label_info(":$target_name", "dir")
output = "${root_out_dir}/lib.java${_target_dir_name}/${target_name}.jar"
direct_deps_only = false
use_interface_jars = false
use_unprocessed_jars = false
requires_android = true
deps = [
":webrtc_unity_java",
"../rtc_base:base_java",
"../sdk/android:libjingle_peerconnection_java",
"../sdk/android:libjingle_peerconnection_metrics_default_java",
"//third_party/android_deps:com_android_support_support_annotations_java",
]
}
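# For orientation: get_label_info(":$target_name", "dir") expands to this
# file's directory label (e.g. //examples), so the jar above is written under
# a lib.java/... subdirectory of $root_out_dir.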
junit_binary("android_examples_junit_tests") {
sources = [
"androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java",
"androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java",
"androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java",
]
deps = [
":AppRTCMobile_javalib",
"../sdk/android:peerconnection_java",
"//base:base_java_test_support",
"//third_party/google-truth:google_truth_java",
]
additional_jar_files = [ [
"../sdk/android/tests/resources/robolectric.properties",
"robolectric.properties",
] ]
}
}
if (!build_with_chromium) {
# Doesn't build within Chrome on Win.
rtc_executable("stun_prober") {
testonly = true
sources = [ "stunprober/main.cc" ]
deps = [
"../p2p:libstunprober",
"../p2p:rtc_p2p",
"../rtc_base",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
]
}
}

13
third_party/libwebrtc/examples/DEPS vendored Normal file

@@ -0,0 +1,13 @@
include_rules = [
"+common_video",
"+logging/rtc_event_log/rtc_event_log_factory.h",
"+media",
"+modules/audio_device",
"+modules/video_capture",
"+modules/audio_processing",
"+p2p",
"+pc",
"+sdk/objc",
"+system_wrappers/include",
"+third_party/libyuv",
]

4
third_party/libwebrtc/examples/OWNERS vendored Normal file

@@ -0,0 +1,4 @@
magjed@webrtc.org
perkj@webrtc.org
tkchin@webrtc.org
kthelgason@webrtc.org

16
third_party/libwebrtc/examples/aarproject/.gitignore vendored Normal file

@@ -0,0 +1,16 @@
# Default ignores by Android Studio
*.iml
.gradle
# We want to specify our own SDK.
# /local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
.externalNativeBuild
# Additional ignores
/gradlew
/gradlew.bat
/gradle

1
third_party/libwebrtc/examples/aarproject/OWNERS vendored Normal file

@@ -0,0 +1 @@
sakal@webrtc.org

1
third_party/libwebrtc/examples/aarproject/app/.gitignore vendored Normal file

@@ -0,0 +1 @@
/build

52
third_party/libwebrtc/examples/aarproject/app/build.gradle vendored Normal file

@@ -0,0 +1,52 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion 27
defaultConfig {
applicationId "org.appspot.apprtc"
minSdkVersion 21
targetSdkVersion 21
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
sourceSets {
main {
manifest.srcFile "../../androidapp/AndroidManifest.xml"
java.srcDirs = [
"../../androidapp/src"
]
res.srcDirs = [
"../../androidapp/res"
]
}
androidTest {
manifest.srcFile "../../androidtests/AndroidManifest.xml"
java.srcDirs = [
"../../androidtests/src"
]
// This test doesn't work in Android Studio.
java.exclude('**/CallActivityStubbedInputOutputTest.java')
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation fileTree(dir: '../../androidapp/third_party/autobanh/lib', include: ['autobanh.jar'])
implementation 'com.android.support:appcompat-v7:26.1.0'
implementation 'org.webrtc:google-webrtc:1.0.+'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
}
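// Note: the dynamic version 'org.webrtc:google-webrtc:1.0.+' resolves to the
// newest published 1.0.x AAR at build time; pinning an exact version would
// make builds reproducible.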

25
third_party/libwebrtc/examples/aarproject/app/proguard-rules.pro vendored Normal file

@@ -0,0 +1,25 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /usr/local/google/home/sakal/Android/Sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

27
third_party/libwebrtc/examples/aarproject/build.gradle vendored Normal file

@@ -0,0 +1,27 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:4.0.0'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

17
third_party/libwebrtc/examples/aarproject/gradle.properties vendored Normal file

@@ -0,0 +1,17 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. For more details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true

2
third_party/libwebrtc/examples/aarproject/local.properties vendored Normal file

@@ -0,0 +1,2 @@
# Use Android SDK from third_party/android_sdk/public
sdk.dir=../../third_party/android_sdk/public

1
third_party/libwebrtc/examples/aarproject/settings.gradle vendored Normal file

@@ -0,0 +1 @@
include ':app'

60
third_party/libwebrtc/examples/androidapp/AndroidManifest.xml vendored Normal file

@@ -0,0 +1,60 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="org.appspot.apprtc"
android:versionCode="1"
android:versionName="1.0">
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
<uses-sdk android:minSdkVersion="21" android:targetSdkVersion="29" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.BLUETOOTH" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<!-- This is a test application that should always be debuggable. -->
<application android:label="@string/app_name"
android:icon="@drawable/ic_launcher"
android:allowBackup="false"
android:debuggable="true"
android:supportsRtl="false"
tools:ignore="HardcodedDebugMode">
<activity android:name="ConnectActivity"
android:label="@string/app_name"
android:windowSoftInputMode="adjustPan"
android:theme="@style/AppTheme">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
<intent-filter>
<action android:name="android.intent.action.VIEW"/>
<category android:name="android.intent.category.DEFAULT"/>
<category android:name="android.intent.category.BROWSABLE"/>
<data android:scheme="https" android:host="appr.tc"/>
<data android:scheme="http" android:host="appr.tc"/>
</intent-filter>
</activity>
<activity android:name="SettingsActivity"
android:label="@string/settings_name"
android:theme="@style/AppTheme">
</activity>
<activity android:name="CallActivity"
android:label="@string/app_name"
android:screenOrientation="fullUser"
android:configChanges="orientation|smallestScreenSize|screenSize|screenLayout"
android:theme="@style/CallActivityTheme">
</activity>
</application>
</manifest>

2
third_party/libwebrtc/examples/androidapp/OWNERS vendored Normal file

@@ -0,0 +1,2 @@
magjed@webrtc.org
sakal@webrtc.org

23
third_party/libwebrtc/examples/androidapp/README vendored Normal file

@@ -0,0 +1,23 @@
This directory contains an example Android client for https://appr.tc

Prerequisites:
- "Getting the code", "Compiling", and "Using the Bundled Android SDK/NDK"
  on http://www.webrtc.org/native-code/android

Example of building & using the app:

cd <path/to/webrtc>/src
ninja -C out/Default AppRTCMobile
adb install -r out/Default/apks/AppRTCMobile.apk

In desktop Chrome, navigate to https://appr.tc and note the r=<NNN> room
this redirects to, or navigate directly to https://appr.tc/r/<NNN> with
your own room number. Launch AppRTC on the device and add the same <NNN>
to the room name list.

You can also run the application from the command line to connect to the
first room in the list:

adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW

This should result in the app launching on Android and connecting to the
3-dot-apprtc page displayed in the desktop browser.

To run a loopback test, execute the following command:

adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW --ez "org.appspot.apprtc.LOOPBACK" true
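
Because the manifest registers ConnectActivity for VIEW intents on appr.tc
URLs, the room URL can also be passed directly as intent data, for example
(12345 stands in for your room number):

adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW -d "https://appr.tc/r/12345"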

17
third_party/libwebrtc/examples/androidapp/ant.properties vendored Normal file

@@ -0,0 +1,17 @@
# This file is used to override default values used by the Ant build system.
#
# This file must be checked into Version Control Systems, as it is
# integral to the build system of your project.
# This file is only used by the Ant script.
# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.
# You can also use it to define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.

92
third_party/libwebrtc/examples/androidapp/build.xml vendored Normal file

@@ -0,0 +1,92 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="AppRTCMobile" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked into
Version Control Systems. -->
<property file="local.properties" />
<!-- The ant.properties file can be created by you. It is only edited by the
'android' tool to add properties to it.
This is the place to change some Ant specific build properties.
Here are some properties you may want to change/update:
source.dir
The name of the source directory. Default is 'src'.
out.dir
The name of the output directory. Default is 'bin'.
For other overridable properties, look at the beginning of the rules
files in the SDK, at tools/ant/build.xml
Properties related to the SDK location or the project target should
be updated using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your
application and should be checked into Version Control Systems.
-->
<property file="ant.properties" />
<!-- if sdk.dir was not set from one of the property files, then
get it from the ANDROID_SDK_ROOT env var.
This must be done before we load project.properties since
the proguard config can use sdk.dir -->
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
<isset property="env.ANDROID_SDK_ROOT" />
</condition>
<!-- The project.properties file is created and updated by the 'android'
tool, as well as ADT.
This contains project specific properties such as project target, and library
dependencies. Lower level build properties are stored in ant.properties
(or in .classpath for Eclipse projects).
This file is an integral part of the build system for your
application and should be checked into Version Control Systems. -->
<loadproperties srcFile="project.properties" />
<!-- quick check on sdk.dir -->
<fail
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
unless="sdk.dir"
/>
<!--
Import per project custom build rules if present at the root of the project.
This is the place to put custom intermediary targets such as:
-pre-build
-pre-compile
-post-compile (This is typically used for code obfuscation.
Compiled code location: ${out.classes.absolute.dir}
If this is not done in place, override ${out.dex.input.absolute.dir})
-post-package
-post-build
-pre-clean
-->
<import file="custom_rules.xml" optional="true" />
<!-- Import the actual build file.
To customize existing targets, there are two options:
- Customize only one target:
- copy/paste the target into this file, *before* the
<import> task.
- customize it to your needs.
- Customize the whole content of build.xml
- copy/paste the content of the rules files (minus the top node)
into this file, replacing the <import> task.
- customize to your needs.
***********************
****** IMPORTANT ******
***********************
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
in order to avoid having your file be overridden by tools such as "android update project"
-->
<!-- version-tag: 1 -->
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>

16
third_party/libwebrtc/examples/androidapp/project.properties vendored Normal file

@@ -0,0 +1,16 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-22
java.compilerargs=-Xlint:all -Werror

Binary data (vendored new files; image previews not shown):
third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png (1.4 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png (587 B)
third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png (663 B)
third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png (2.4 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png (1.8 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png (1.4 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png (461 B)
third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png (477 B)
third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png (2.4 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png (1.8 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png (1.4 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png (461 B)
third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png (477 B)
third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png (1.7 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png (1.8 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png (1.4 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png (743 B)
third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png (761 B)
third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png (3.3 KiB)
third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png (1.8 KiB)

34
third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml vendored Normal file

@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- tools:ignore is needed because lint thinks this can be replaced with a merge. Replacing this
with a merge causes the fullscreen SurfaceView not to be centered. -->
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:ignore="MergeRootFrame">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/fullscreen_video_view"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center" />
<org.webrtc.SurfaceViewRenderer
android:id="@+id/pip_video_view"
android:layout_height="144dp"
android:layout_width="wrap_content"
android:layout_gravity="bottom|end"
android:layout_margin="16dp"/>
<FrameLayout
android:id="@+id/call_fragment_container"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<FrameLayout
android:id="@+id/hud_fragment_container"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>

80
third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml vendored Normal file

@@ -0,0 +1,80 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_margin="16dp"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:weightSum="1"
android:layout_centerHorizontal="true">
<TextView
android:id="@+id/room_edittext_description"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="@string/room_description"/>
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:layout_marginBottom="8dp">
<!-- TODO(crbug.com/900912): Fix and remove lint ignore -->
<EditText
tools:ignore="LabelFor,Autofill"
android:id="@+id/room_edittext"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:maxLines="1"
android:imeOptions="actionDone"
android:inputType="text"/>
<ImageButton
android:id="@+id/connect_button"
android:layout_width="48dp"
android:layout_height="48dp"
android:contentDescription="@string/connect_description"
android:background="@android:drawable/sym_action_call" />
<ImageButton
android:id="@+id/add_favorite_button"
android:layout_width="48dp"
android:layout_height="48dp"
android:contentDescription="@string/add_favorite_description"
android:background="@android:drawable/ic_input_add" />
</LinearLayout>
<TextView
android:id="@+id/room_listview_description"
android:layout_width="match_parent"
android:layout_height="48dp"
android:layout_marginTop="8dp"
android:lines="1"
android:maxLines="1"
android:textAppearance="?android:attr/textAppearanceMedium"
android:text="@string/favorites"
android:gravity="center_vertical"/>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1">
<ListView
android:id="@+id/room_listview"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:drawSelectorOnTop="false" />
<TextView
android:id="@android:id/empty"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:gravity="center"
android:text="@string/no_favorites" />
</FrameLayout>
</LinearLayout>

77
third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml vendored Normal file

@@ -0,0 +1,77 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextView
android:id="@+id/contact_name_call"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_above="@+id/buttons_call_container"
android:textSize="24sp"
android:layout_margin="8dp"/>
<LinearLayout
android:id="@+id/buttons_call_container"
android:orientation="horizontal"
android:layout_above="@+id/capture_format_text_call"
android:layout_alignWithParentIfMissing="true"
android:layout_marginBottom="32dp"
android:layout_centerHorizontal="true"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
<ImageButton
android:id="@+id/button_call_disconnect"
android:background="@drawable/disconnect"
android:contentDescription="@string/disconnect_call"
android:layout_marginEnd="16dp"
android:layout_width="48dp"
android:layout_height="48dp"/>
<ImageButton
android:id="@+id/button_call_switch_camera"
android:background="@android:drawable/ic_menu_camera"
android:contentDescription="@string/switch_camera"
android:layout_marginEnd="8dp"
android:layout_width="48dp"
android:layout_height="48dp"/>
<ImageButton
android:id="@+id/button_call_scaling_mode"
android:background="@drawable/ic_action_return_from_full_screen"
android:contentDescription="@string/disconnect_call"
android:layout_width="48dp"
android:layout_height="48dp"/>
<ImageButton
android:id="@+id/button_call_toggle_mic"
android:background="@android:drawable/ic_btn_speak_now"
android:contentDescription="@string/toggle_mic"
android:layout_marginEnd="8dp"
android:layout_width="48dp"
android:layout_height="48dp"/>
</LinearLayout>
<TextView
android:id="@+id/capture_format_text_call"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_above="@+id/capture_format_slider_call"
android:textSize="16sp"
android:text="@string/capture_format_change_text"/>
<SeekBar
android:id="@+id/capture_format_slider_call"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_alignParentBottom="true"
android:progress="50"
android:layout_margin="8dp"/>
</RelativeLayout>

74
third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml vendored Normal file

@@ -0,0 +1,74 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<ImageButton
android:id="@+id/button_toggle_debug"
android:background="@android:drawable/ic_menu_info_details"
android:contentDescription="@string/toggle_debug"
android:layout_alignParentBottom="true"
android:layout_alignParentStart="true"
android:layout_width="48dp"
android:layout_height="48dp"/>
<TextView
android:id="@+id/encoder_stat_call"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentEnd="true"
android:textStyle="bold"
android:textColor="#C000FF00"
android:textSize="12sp"
android:layout_margin="8dp"/>
<TableLayout
android:id="@+id/hudview_container"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TableRow>
<TextView
android:id="@+id/hud_stat_bwe"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:alpha="0.4"
android:padding="2dip"
android:background="@android:color/white"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/hud_stat_connection"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:alpha="0.4"
android:padding="2dip"
android:background="@android:color/white"
android:textColor="@android:color/black" />
</TableRow>
<TableRow>
<TextView
android:id="@+id/hud_stat_video_send"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:alpha="0.4"
android:padding="2dip"
android:background="@android:color/white"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/hud_stat_video_recv"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="2dip"
android:alpha="0.4"
android:background="@android:color/white"
android:textColor="@android:color/black" />
</TableRow>
</TableLayout>
</RelativeLayout>

13
third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml vendored Normal file

@@ -0,0 +1,13 @@
<menu xmlns:android="http://schemas.android.com/apk/res/android">
<item
android:id="@+id/action_loopback"
android:icon="@drawable/ic_loopback_call"
android:showAsAction="always"
android:title="@string/action_loopback"/>
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:icon="@android:drawable/ic_menu_preferences"
android:showAsAction="ifRoom"
android:title="@string/action_settings"/>
</menu>

10
third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml vendored Normal file

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="AppTheme" parent="android:Theme.Holo" />
<style name="CallActivityTheme" parent="android:Theme.Black">
<item name="android:windowActionBar">false</item>
<item name="android:windowFullscreen">true</item>
<item name="android:windowNoTitle">true</item>
</style>
</resources>

4
third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml vendored Normal file

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="AppTheme" parent="android:Theme.Material" />
</resources>

60
third_party/libwebrtc/examples/androidapp/res/values/arrays.xml vendored Normal file

@@ -0,0 +1,60 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
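<!-- Display arrays are paired with *Values arrays: the entry at index i of a
     Values array is the stored setting value for the display string at the
     same index of its matching display array. -->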
<string-array name="videoResolutions">
<item>Default</item>
<item>4K (3840 x 2160)</item>
<item>Full HD (1920 x 1080)</item>
<item>HD (1280 x 720)</item>
<item>VGA (640 x 480)</item>
<item>QVGA (320 x 240)</item>
</string-array>
<string-array name="videoResolutionsValues">
<item>Default</item>
<item>3840 x 2160</item>
<item>1920 x 1080</item>
<item>1280 x 720</item>
<item>640 x 480</item>
<item>320 x 240</item>
</string-array>
<string-array name="cameraFps">
<item>Default</item>
<item>30 fps</item>
<item>15 fps</item>
</string-array>
<string-array name="startBitrate">
<item>Default</item>
<item>Manual</item>
</string-array>
<string-array name="videoCodecs">
<item>VP8</item>
<item>VP9</item>
<item>H264 Baseline</item>
<item>H264 High</item>
</string-array>
<string-array name="audioCodecs">
<item>OPUS</item>
<item>ISAC</item>
</string-array>
<string-array name="speakerphone">
<item>Auto (proximity sensor)</item>
<item>Enabled</item>
<item>Disabled</item>
</string-array>
<string-array name="speakerphoneValues">
<item>auto</item>
<item>true</item>
<item>false</item>
</string-array>
<string-array name="roomListContextMenu">
<item>Remove favorite</item>
</string-array>
</resources>

224
third_party/libwebrtc/examples/androidapp/res/values/strings.xml vendored Normal file

@@ -0,0 +1,224 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name" translatable="false">AppRTC</string>
<string name="settings_name" translatable="false">AppRTC Settings</string>
<string name="disconnect_call">Disconnect Call</string>
<string name="room_description">
Please enter a room name. Room names are shared with everyone, so think
of something unique and send it to a friend.
</string>
<string name="favorites">Favorites</string>
<string name="no_favorites">No favorites</string>
<string name="invalid_url_title">Invalid URL</string>
<string name="invalid_url_text">The URL or room name you entered resulted in an invalid URL: %1$s
</string>
<string name="channel_error_title">Connection error</string>
<string name="connecting_to">Connecting to: %1$s</string>
<string name="missing_url">FATAL ERROR: Missing URL to connect to.</string>
<string name="camera2_texture_only_error">Camera2 only supports capturing to texture. Either disable Camera2 or enable capturing to texture in the options.</string>
<string name="ok">OK</string>
<string name="switch_camera">Switch front/back camera</string>
<string name="capture_format_change_text">Slide to change capture format</string>
<string name="muted">Muted</string>
<string name="toggle_debug">Toggle debug view</string>
<string name="toggle_mic">Toggle microphone on/off</string>
<string name="action_settings">Settings</string>
<string name="action_loopback">Loopback connection</string>
<string name="connect_description">Connect to the room</string>
<string name="add_favorite_description">Add favorite</string>
<string name="format_description">%1$dx%2$d @ %3$d fps</string>
<string name="missing_permissions_try_again">The application is missing permissions. It might not work correctly. Do you want to try again?</string>
<string name="yes">Yes</string>
<string name="no">No</string>
<!-- Settings strings. -->
<string name="pref_room_key">room_preference</string>
<string name="pref_room_list_key">room_list_preference</string>
<string name="pref_videosettings_key">video_settings_key</string>
<string name="pref_videosettings_title">WebRTC video settings.</string>
<string name="pref_videocall_key">videocall_preference</string>
<string name="pref_videocall_title">Video call.</string>
<string name="pref_videocall_dlg">Enable video in a call.</string>
<string name="pref_videocall_default">true</string>
<string name="pref_screencapture_key">screencapture_preference</string>
<string name="pref_screencapture_title">Use screencapture.</string>
<string name="pref_screencapture_default">false</string>
<string name="pref_camera2_key">camera2_preference</string>
<string name="pref_camera2_title">Use Camera2.</string>
<string name="pref_camera2_default">true</string>
<string name="pref_camera2_not_supported">Not supported on this device.</string>
<string name="pref_resolution_key">resolution_preference</string>
<string name="pref_resolution_title">Video resolution.</string>
<string name="pref_resolution_dlg">Enter AppRTC local video resolution.</string>
<string name="pref_resolution_default">Default</string>
<string name="pref_fps_key">fps_preference</string>
<string name="pref_fps_title">Camera fps.</string>
<string name="pref_fps_dlg">Enter local camera fps.</string>
<string name="pref_fps_default">Default</string>
<string name="pref_capturequalityslider_key">capturequalityslider_preference</string>
<string name="pref_capturequalityslider_title">Capture quality slider.</string>
<string name="pref_capturequalityslider_dlg">Enable slider for changing capture quality.</string>
<string name="pref_capturequalityslider_default">false</string>
<string name="pref_maxvideobitrate_key">maxvideobitrate_preference</string>
<string name="pref_maxvideobitrate_title">Maximum video bitrate setting.</string>
<string name="pref_maxvideobitrate_dlg">Maximum video bitrate setting.</string>
<string name="pref_maxvideobitrate_default">Default</string>
<string name="pref_maxvideobitratevalue_key">maxvideobitratevalue_preference</string>
<string name="pref_maxvideobitratevalue_title">Video encoder maximum bitrate.</string>
<string name="pref_maxvideobitratevalue_dlg">Enter video encoder maximum bitrate in kbps.</string>
<string name="pref_maxvideobitratevalue_default">1700</string>
<string name="pref_videocodec_key">videocodec_preference</string>
<string name="pref_videocodec_title">Default video codec.</string>
<string name="pref_videocodec_dlg">Select default video codec.</string>
<string name="pref_videocodec_default">VP8</string>
<string name="pref_hwcodec_key">hwcodec_preference</string>
<string name="pref_hwcodec_title">Video codec hardware acceleration.</string>
<string name="pref_hwcodec_dlg">Use hardware accelerated video codec (if available).</string>
<string name="pref_hwcodec_default">true</string>
<string name="pref_capturetotexture_key">capturetotexture_preference</string>
<string name="pref_capturetotexture_title">Video capture to surface texture.</string>
<string name="pref_capturetotexture_dlg">Capture video to textures (if available).</string>
<string name="pref_capturetotexture_default">true</string>
<string name="pref_flexfec_key">flexfec_preference</string>
<string name="pref_flexfec_title">Codec-agnostic Flexible FEC.</string>
<string name="pref_flexfec_dlg">Enable FlexFEC.</string>
<string name="pref_flexfec_default">false</string>
<string name="pref_value_enabled">Enabled</string>
<string name="pref_value_disabled">Disabled</string>
<string name="pref_audiosettings_key">audio_settings_key</string>
<string name="pref_audiosettings_title">WebRTC audio settings.</string>
<string name="pref_startaudiobitrate_key">startaudiobitrate_preference</string>
<string name="pref_startaudiobitrate_title">Audio bitrate setting.</string>
<string name="pref_startaudiobitrate_dlg">Audio bitrate setting.</string>
<string name="pref_startaudiobitrate_default">Default</string>
<string name="pref_startaudiobitratevalue_key">startaudiobitratevalue_preference</string>
<string name="pref_startaudiobitratevalue_title">Audio codec bitrate.</string>
<string name="pref_startaudiobitratevalue_dlg">Enter audio codec bitrate in kbps.</string>
<string name="pref_startaudiobitratevalue_default">32</string>
<string name="pref_audiocodec_key">audiocodec_preference</string>
<string name="pref_audiocodec_title">Default audio codec.</string>
<string name="pref_audiocodec_dlg">Select default audio codec.</string>
<string name="pref_audiocodec_default">OPUS</string>
<string name="pref_noaudioprocessing_key">audioprocessing_preference</string>
<string name="pref_noaudioprocessing_title">Disable audio processing.</string>
<string name="pref_noaudioprocessing_dlg">Disable audio processing pipeline.</string>
<string name="pref_noaudioprocessing_default">false</string>
<string name="pref_aecdump_key">aecdump_preference</string>
<string name="pref_aecdump_title">Create aecdump.</string>
<string name="pref_aecdump_dlg">Enable diagnostic audio recordings.</string>
<string name="pref_aecdump_default">false</string>
<string name="pref_enable_save_input_audio_to_file_key">enable_key</string>
<string name="pref_enable_save_input_audio_to_file_title">Save input audio to file.</string>
<string name="pref_enable_save_input_audio_to_file_dlg">Save input audio to file.</string>
<string name="pref_enable_save_input_audio_to_file_default">false</string>
<string name="pref_opensles_key">opensles_preference</string>
<string name="pref_opensles_title">Use OpenSL ES for audio playback.</string>
<string name="pref_opensles_dlg">Use OpenSL ES for audio playback.</string>
<string name="pref_opensles_default">false</string>
<string name="pref_disable_built_in_aec_key">disable_built_in_aec_preference</string>
<string name="pref_disable_built_in_aec_title">Disable hardware AEC.</string>
<string name="pref_disable_built_in_aec_dlg">Disable hardware AEC.</string>
<string name="pref_disable_built_in_aec_default">false</string>
<string name="pref_built_in_aec_not_available">Hardware AEC is not available</string>
<string name="pref_disable_built_in_agc_key">disable_built_in_agc_preference</string>
<string name="pref_disable_built_in_agc_title">Disable hardware AGC.</string>
<string name="pref_disable_built_in_agc_dlg">Disable hardware AGC.</string>
<string name="pref_disable_built_in_agc_default">false</string>
<string name="pref_built_in_agc_not_available">Hardware AGC is not available</string>
<string name="pref_disable_built_in_ns_key">disable_built_in_ns_preference</string>
<string name="pref_disable_built_in_ns_title">Disable hardware NS.</string>
<string name="pref_disable_built_in_ns_dlg">Disable hardware NS.</string>
<string name="pref_disable_built_in_ns_default">false</string>
<string name="pref_built_in_ns_not_available">Hardware NS is not available</string>
<string name="pref_disable_webrtc_agc_and_hpf_key">disable_webrtc_agc_and_hpf_preference</string>
<string name="pref_disable_webrtc_agc_and_hpf_title">Disable WebRTC AGC and HPF.</string>
<string name="pref_disable_webrtc_agc_default">false</string>
<string name="pref_speakerphone_key">speakerphone_preference</string>
<string name="pref_speakerphone_title">Speakerphone.</string>
<string name="pref_speakerphone_dlg">Speakerphone.</string>
<string name="pref_speakerphone_default">auto</string>
<string name="pref_datasettings_key">data_settings_key</string>
<string name="pref_datasettings_title">WebRTC data channel settings.</string>
<string name="pref_enable_datachannel_key">enable_datachannel_preference</string>
<string name="pref_enable_datachannel_title">Enable datachannel.</string>
<string name="pref_enable_datachannel_default" translatable="false">true</string>
<string name="pref_ordered_key">ordered_preference</string>
<string name="pref_ordered_title">Order messages.</string>
<string name="pref_ordered_default" translatable="false">true</string>
<string name="pref_data_protocol_key">Subprotocol</string>
<string name="pref_data_protocol_title">Subprotocol.</string>
<string name="pref_data_protocol_dlg">Enter subprotocol.</string>
<string name="pref_data_protocol_default" translatable="false"></string>
<string name="pref_negotiated_key">negotiated_preference</string>
<string name="pref_negotiated_title">Negotiated.</string>
<string name="pref_negotiated_default" translatable="false">false</string>
<string name="pref_max_retransmit_time_ms_key">max_retransmit_time_ms_preference</string>
<string name="pref_max_retransmit_time_ms_title">Max delay to retransmit.</string>
<string name="pref_max_retransmit_time_ms_dlg">Enter max delay to retransmit (in ms).</string>
<string name="pref_max_retransmit_time_ms_default" translatable="false">-1</string>
<string name="pref_max_retransmits_key">max_retransmits_preference</string>
<string name="pref_max_retransmits_title">Max attempts to retransmit.</string>
<string name="pref_max_retransmits_dlg">Enter max attempts to retransmit.</string>
<string name="pref_max_retransmits_default" translatable="false">-1</string>
<string name="pref_data_id_key">data_id_preference</string>
<string name="pref_data_id_title">Data id.</string>
<string name="pref_data_id_dlg">Enter data channel id.</string>
<string name="pref_data_id_default" translatable="false">-1</string>
<string name="pref_miscsettings_key">misc_settings_key</string>
<string name="pref_miscsettings_title">Miscellaneous settings.</string>
<string name="pref_room_server_url_key">room_server_url_preference</string>
<string name="pref_room_server_url_title">Room server URL.</string>
<string name="pref_room_server_url_dlg">Enter a room server URL.</string>
<string name="pref_room_server_url_default" translatable="false">https://appr.tc</string>
<string name="pref_displayhud_key">displayhud_preference</string>
<string name="pref_displayhud_title">Display call statistics.</string>
<string name="pref_displayhud_dlg">Display call statistics.</string>
<string name="pref_displayhud_default" translatable="false">false</string>
<string name="pref_tracing_key">tracing_preference</string>
<string name="pref_tracing_title">Debug performance tracing.</string>
<string name="pref_tracing_dlg">Debug performance tracing.</string>
<string name="pref_tracing_default" translatable="false">false</string>
<string name="pref_enable_rtceventlog_key">enable_rtceventlog_key</string>
<string name="pref_enable_rtceventlog_title">Enable RtcEventLog.</string>
<string name="pref_enable_rtceventlog_default">false</string>
</resources>

View file

@ -0,0 +1,247 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<PreferenceCategory
android:key="@string/pref_videosettings_key"
android:title="@string/pref_videosettings_title">
<CheckBoxPreference
android:key="@string/pref_videocall_key"
android:title="@string/pref_videocall_title"
android:dialogTitle="@string/pref_videocall_dlg"
android:defaultValue="@string/pref_videocall_default" />
<CheckBoxPreference
android:key="@string/pref_screencapture_key"
android:title="@string/pref_screencapture_title"
android:defaultValue="@string/pref_screencapture_default" />
<CheckBoxPreference
android:key="@string/pref_camera2_key"
android:title="@string/pref_camera2_title"
android:defaultValue="@string/pref_camera2_default" />
<ListPreference
android:key="@string/pref_resolution_key"
android:title="@string/pref_resolution_title"
android:defaultValue="@string/pref_resolution_default"
android:dialogTitle="@string/pref_resolution_dlg"
android:entries="@array/videoResolutions"
android:entryValues="@array/videoResolutionsValues" />
<ListPreference
android:key="@string/pref_fps_key"
android:title="@string/pref_fps_title"
android:defaultValue="@string/pref_fps_default"
android:dialogTitle="@string/pref_fps_dlg"
android:entries="@array/cameraFps"
android:entryValues="@array/cameraFps" />
<CheckBoxPreference
android:key="@string/pref_capturequalityslider_key"
android:title="@string/pref_capturequalityslider_title"
android:dialogTitle="@string/pref_capturequalityslider_dlg"
android:defaultValue="@string/pref_capturequalityslider_default" />
<ListPreference
android:key="@string/pref_maxvideobitrate_key"
android:title="@string/pref_maxvideobitrate_title"
android:defaultValue="@string/pref_maxvideobitrate_default"
android:dialogTitle="@string/pref_maxvideobitrate_dlg"
android:entries="@array/startBitrate"
android:entryValues="@array/startBitrate" />
<EditTextPreference
android:key="@string/pref_maxvideobitratevalue_key"
android:title="@string/pref_maxvideobitratevalue_title"
android:inputType="number"
android:defaultValue="@string/pref_maxvideobitratevalue_default"
android:dialogTitle="@string/pref_maxvideobitratevalue_dlg" />
<ListPreference
android:key="@string/pref_videocodec_key"
android:title="@string/pref_videocodec_title"
android:defaultValue="@string/pref_videocodec_default"
android:dialogTitle="@string/pref_videocodec_dlg"
android:entries="@array/videoCodecs"
android:entryValues="@array/videoCodecs" />
<CheckBoxPreference
android:key="@string/pref_hwcodec_key"
android:title="@string/pref_hwcodec_title"
android:dialogTitle="@string/pref_hwcodec_dlg"
android:defaultValue="@string/pref_hwcodec_default" />
<CheckBoxPreference
android:key="@string/pref_capturetotexture_key"
android:title="@string/pref_capturetotexture_title"
android:dialogTitle="@string/pref_capturetotexture_dlg"
android:defaultValue="@string/pref_capturetotexture_default" />
<CheckBoxPreference
android:key="@string/pref_flexfec_key"
android:title="@string/pref_flexfec_title"
android:dialogTitle="@string/pref_flexfec_dlg"
android:defaultValue="@string/pref_flexfec_default" />
</PreferenceCategory>
<PreferenceCategory
android:key="@string/pref_audiosettings_key"
android:title="@string/pref_audiosettings_title">
<ListPreference
android:key="@string/pref_startaudiobitrate_key"
android:title="@string/pref_startaudiobitrate_title"
android:defaultValue="@string/pref_startaudiobitrate_default"
android:dialogTitle="@string/pref_startaudiobitrate_dlg"
android:entries="@array/startBitrate"
android:entryValues="@array/startBitrate" />
<EditTextPreference
android:key="@string/pref_startaudiobitratevalue_key"
android:title="@string/pref_startaudiobitratevalue_title"
android:inputType="number"
android:defaultValue="@string/pref_startaudiobitratevalue_default"
android:dialogTitle="@string/pref_startaudiobitratevalue_dlg" />
<ListPreference
android:key="@string/pref_audiocodec_key"
android:title="@string/pref_audiocodec_title"
android:defaultValue="@string/pref_audiocodec_default"
android:dialogTitle="@string/pref_audiocodec_dlg"
android:entries="@array/audioCodecs"
android:entryValues="@array/audioCodecs" />
<CheckBoxPreference
android:key="@string/pref_noaudioprocessing_key"
android:title="@string/pref_noaudioprocessing_title"
android:dialogTitle="@string/pref_noaudioprocessing_dlg"
android:defaultValue="@string/pref_noaudioprocessing_default" />
<CheckBoxPreference
android:key="@string/pref_aecdump_key"
android:title="@string/pref_aecdump_title"
android:dialogTitle="@string/pref_aecdump_dlg"
android:defaultValue="@string/pref_aecdump_default" />
<CheckBoxPreference
android:key="@string/pref_enable_save_input_audio_to_file_key"
android:title="@string/pref_enable_save_input_audio_to_file_title"
android:dialogTitle="@string/pref_enable_save_input_audio_to_file_dlg"
android:defaultValue="@string/pref_enable_save_input_audio_to_file_default" />
<CheckBoxPreference
android:key="@string/pref_opensles_key"
android:title="@string/pref_opensles_title"
android:dialogTitle="@string/pref_opensles_dlg"
android:defaultValue="@string/pref_opensles_default" />
<CheckBoxPreference
android:key="@string/pref_disable_built_in_aec_key"
android:title="@string/pref_disable_built_in_aec_title"
android:dialogTitle="@string/pref_disable_built_in_aec_dlg"
android:defaultValue="@string/pref_disable_built_in_aec_default" />
<CheckBoxPreference
android:key="@string/pref_disable_built_in_agc_key"
android:title="@string/pref_disable_built_in_agc_title"
android:dialogTitle="@string/pref_disable_built_in_agc_dlg"
android:defaultValue="@string/pref_disable_built_in_agc_default" />
<CheckBoxPreference
android:key="@string/pref_disable_built_in_ns_key"
android:title="@string/pref_disable_built_in_ns_title"
android:dialogTitle="@string/pref_disable_built_in_ns_dlg"
android:defaultValue="@string/pref_disable_built_in_ns_default" />
<CheckBoxPreference
android:key="@string/pref_disable_webrtc_agc_and_hpf_key"
android:title="@string/pref_disable_webrtc_agc_and_hpf_title"
android:defaultValue="@string/pref_disable_webrtc_agc_default" />
<ListPreference
android:key="@string/pref_speakerphone_key"
android:title="@string/pref_speakerphone_title"
android:defaultValue="@string/pref_speakerphone_default"
android:dialogTitle="@string/pref_speakerphone_dlg"
android:entries="@array/speakerphone"
android:entryValues="@array/speakerphoneValues" />
</PreferenceCategory>
<PreferenceCategory
android:key="@string/pref_datasettings_key"
android:title="@string/pref_datasettings_title">
<CheckBoxPreference
android:key="@string/pref_enable_datachannel_key"
android:title="@string/pref_enable_datachannel_title"
android:defaultValue="@string/pref_enable_datachannel_default" />
<CheckBoxPreference
android:key="@string/pref_ordered_key"
android:title="@string/pref_ordered_title"
android:defaultValue="@string/pref_ordered_default" />
<EditTextPreference
android:key="@string/pref_data_protocol_key"
android:title="@string/pref_data_protocol_title"
android:inputType="text"
android:defaultValue="@string/pref_data_protocol_default"
android:dialogTitle="@string/pref_data_protocol_dlg" />
<CheckBoxPreference
android:key="@string/pref_negotiated_key"
android:title="@string/pref_negotiated_title"
android:defaultValue="@string/pref_negotiated_default" />
<EditTextPreference
android:key="@string/pref_max_retransmit_time_ms_key"
android:title="@string/pref_max_retransmit_time_ms_title"
android:inputType="number"
android:defaultValue="@string/pref_max_retransmit_time_ms_default"
android:dialogTitle="@string/pref_max_retransmit_time_ms_dlg" />
<EditTextPreference
android:key="@string/pref_max_retransmits_key"
android:title="@string/pref_max_retransmits_title"
android:inputType="number"
android:defaultValue="@string/pref_max_retransmits_default"
android:dialogTitle="@string/pref_max_retransmits_dlg" />
<EditTextPreference
android:key="@string/pref_data_id_key"
android:title="@string/pref_data_id_title"
android:inputType="number"
android:defaultValue="@string/pref_data_id_default"
android:dialogTitle="@string/pref_data_id_dlg" />
</PreferenceCategory>
<PreferenceCategory
android:key="@string/pref_miscsettings_key"
android:title="@string/pref_miscsettings_title">
<EditTextPreference
android:key="@string/pref_room_server_url_key"
android:title="@string/pref_room_server_url_title"
android:inputType="text"
android:defaultValue="@string/pref_room_server_url_default"
android:dialogTitle="@string/pref_room_server_url_dlg" />
<CheckBoxPreference
android:key="@string/pref_displayhud_key"
android:title="@string/pref_displayhud_title"
android:dialogTitle="@string/pref_displayhud_dlg"
android:defaultValue="@string/pref_displayhud_default" />
<CheckBoxPreference
android:key="@string/pref_tracing_key"
android:title="@string/pref_tracing_title"
android:dialogTitle="@string/pref_tracing_dlg"
android:defaultValue="@string/pref_tracing_default" />
<CheckBoxPreference
android:key="@string/pref_enable_rtceventlog_key"
android:title="@string/pref_enable_rtceventlog_title"
android:defaultValue="@string/pref_enable_rtceventlog_default"/>
</PreferenceCategory>
</PreferenceScreen>
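
The defaults in this screen are declared as string resources (see strings.xml
above), so code that reads a preference back must parse the string form. A
minimal sketch of that pattern, assuming the usual Android preference APIs;
the resource names are the real ones from this commit, but the helper method
itself is hypothetical:

  // Hypothetical helper: reads a CheckBoxPreference whose default is declared
  // as a string resource, e.g. pref_videocall_default ("true").
  // Assumes android.content.Context, android.content.SharedPreferences and
  // android.preference.PreferenceManager are imported.
  private boolean readBoolPref(Context context, int keyResId, int defaultResId) {
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
    String key = context.getString(keyResId);
    boolean defaultValue = Boolean.parseBoolean(context.getString(defaultResId));
    return prefs.getBoolean(key, defaultValue);
  }
  // Usage: readBoolPref(context, R.string.pref_videocall_key, R.string.pref_videocall_default);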

View file

@ -0,0 +1,594 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.annotation.Nullable;
import android.util.Log;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.appspot.apprtc.util.AppRTCUtils;
import org.webrtc.ThreadUtils;
/**
* AppRTCAudioManager manages all audio related parts of the AppRTC demo.
*/
public class AppRTCAudioManager {
private static final String TAG = "AppRTCAudioManager";
private static final String SPEAKERPHONE_AUTO = "auto";
private static final String SPEAKERPHONE_TRUE = "true";
private static final String SPEAKERPHONE_FALSE = "false";
/**
 * AudioDevice enumerates the possible audio devices that we currently
 * support.
*/
public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE }
/** AudioManager state. */
public enum AudioManagerState {
UNINITIALIZED,
PREINITIALIZED,
RUNNING,
}
/** Selected audio device change event. */
public interface AudioManagerEvents {
// Callback fired once audio device is changed or list of available audio devices changed.
void onAudioDeviceChanged(
AudioDevice selectedAudioDevice, Set<AudioDevice> availableAudioDevices);
}
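  // Illustrative lifecycle sketch (added commentary, not part of the original
  // file). A hypothetical caller would typically drive this class roughly as:
  //
  //   AppRTCAudioManager audioManager = AppRTCAudioManager.create(context);
  //   audioManager.start((selectedDevice, availableDevices) -> {
  //     // React to device changes, e.g. update the in-call UI.
  //   });
  //   ...
  //   audioManager.stop();
  //
  // where the lambda implements the single-method AudioManagerEvents interface
  // defined above.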
private final Context apprtcContext;
@Nullable
private AudioManager audioManager;
@Nullable
private AudioManagerEvents audioManagerEvents;
private AudioManagerState amState;
private int savedAudioMode = AudioManager.MODE_INVALID;
private boolean savedIsSpeakerPhoneOn;
private boolean savedIsMicrophoneMute;
private boolean hasWiredHeadset;
// Default audio device; speaker phone for video calls or earpiece for audio
// only calls.
private AudioDevice defaultAudioDevice;
// Contains the currently selected audio device.
// This device is changed automatically using a certain scheme where e.g.
// a wired headset "wins" over speaker phone. It is also possible for a
  // user to explicitly select a device (and override any predefined scheme).
// See |userSelectedAudioDevice| for details.
private AudioDevice selectedAudioDevice;
// Contains the user-selected audio device which overrides the predefined
// selection scheme.
// TODO(henrika): always set to AudioDevice.NONE today. Add support for
// explicit selection based on choice by userSelectedAudioDevice.
private AudioDevice userSelectedAudioDevice;
// Contains speakerphone setting: auto, true or false
@Nullable private final String useSpeakerphone;
// Proximity sensor object. It measures the proximity of an object in cm
// relative to the view screen of a device and can therefore be used to
// assist device switching (close to ear <=> use headset earpiece if
// available, far from ear <=> use speaker phone).
@Nullable private AppRTCProximitySensor proximitySensor;
// Handles all tasks related to Bluetooth headset devices.
private final AppRTCBluetoothManager bluetoothManager;
// Contains a list of available audio devices. A Set collection is used to
// avoid duplicate elements.
private Set<AudioDevice> audioDevices = new HashSet<>();
// Broadcast receiver for wired headset intent broadcasts.
private BroadcastReceiver wiredHeadsetReceiver;
// Callback method for changes in audio focus.
@Nullable
private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener;
/**
* This method is called when the proximity sensor reports a state change,
* e.g. from "NEAR to FAR" or from "FAR to NEAR".
*/
private void onProximitySensorChangedState() {
if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) {
return;
}
// The proximity sensor should only be activated when there are exactly two
// available audio devices.
if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
&& audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
if (proximitySensor.sensorReportsNearState()) {
// Sensor reports that a "handset is being held up to a person's ear",
// or "something is covering the light sensor".
setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.EARPIECE);
} else {
// Sensor reports that a "handset is removed from a person's ear", or
// "the light sensor is no longer covered".
setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE);
}
}
}
/* Receiver which handles changes in wired headset availability. */
private class WiredHeadsetReceiver extends BroadcastReceiver {
private static final int STATE_UNPLUGGED = 0;
private static final int STATE_PLUGGED = 1;
private static final int HAS_NO_MIC = 0;
private static final int HAS_MIC = 1;
@Override
public void onReceive(Context context, Intent intent) {
int state = intent.getIntExtra("state", STATE_UNPLUGGED);
int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
String name = intent.getStringExtra("name");
Log.d(TAG, "WiredHeadsetReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+ "a=" + intent.getAction() + ", s="
+ (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+ (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+ isInitialStickyBroadcast());
hasWiredHeadset = (state == STATE_PLUGGED);
updateAudioDeviceState();
}
}
/** Construction. */
static AppRTCAudioManager create(Context context) {
return new AppRTCAudioManager(context);
}
private AppRTCAudioManager(Context context) {
Log.d(TAG, "ctor");
ThreadUtils.checkIsOnMainThread();
apprtcContext = context;
audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
bluetoothManager = AppRTCBluetoothManager.create(context, this);
wiredHeadsetReceiver = new WiredHeadsetReceiver();
amState = AudioManagerState.UNINITIALIZED;
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
context.getString(R.string.pref_speakerphone_default));
Log.d(TAG, "useSpeakerphone: " + useSpeakerphone);
if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) {
defaultAudioDevice = AudioDevice.EARPIECE;
} else {
defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
}
// Create and initialize the proximity sensor.
    // Tablet devices (e.g. Nexus 7) do not support proximity sensors.
    // Note that the sensor will not be active until start() has been called.
proximitySensor = AppRTCProximitySensor.create(context,
// This method will be called each time a state change is detected.
// Example: user holds their hand over the device (closer than ~5 cm),
// or removes their hand from the device.
        this::onProximitySensorChangedState);
Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice);
AppRTCUtils.logDeviceInfo(TAG);
}
@SuppressWarnings("deprecation") // TODO(henrika): audioManager.requestAudioFocus() is deprecated.
public void start(AudioManagerEvents audioManagerEvents) {
Log.d(TAG, "start");
ThreadUtils.checkIsOnMainThread();
if (amState == AudioManagerState.RUNNING) {
Log.e(TAG, "AudioManager is already active");
return;
}
// TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED.
Log.d(TAG, "AudioManager starts...");
this.audioManagerEvents = audioManagerEvents;
amState = AudioManagerState.RUNNING;
// Store current audio state so we can restore it when stop() is called.
savedAudioMode = audioManager.getMode();
savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
savedIsMicrophoneMute = audioManager.isMicrophoneMute();
hasWiredHeadset = hasWiredHeadset();
// Create an AudioManager.OnAudioFocusChangeListener instance.
audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
      // Called on the listener to notify it that the audio focus for this listener has changed.
      // The |focusChange| value indicates whether the focus was gained or lost, and if lost,
      // whether the loss is transient or the new focus holder will hold it for an unknown
      // amount of time.
// TODO(henrika): possibly extend support of handling audio-focus changes. Only contains
// logging for now.
@Override
public void onAudioFocusChange(int focusChange) {
final String typeOfChange;
switch (focusChange) {
case AudioManager.AUDIOFOCUS_GAIN:
typeOfChange = "AUDIOFOCUS_GAIN";
break;
case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT";
break;
case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE";
break;
case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK";
break;
case AudioManager.AUDIOFOCUS_LOSS:
typeOfChange = "AUDIOFOCUS_LOSS";
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT";
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK";
break;
default:
typeOfChange = "AUDIOFOCUS_INVALID";
break;
}
Log.d(TAG, "onAudioFocusChange: " + typeOfChange);
}
};
// Request audio playout focus (without ducking) and install listener for changes in focus.
int result = audioManager.requestAudioFocus(audioFocusChangeListener,
AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
Log.d(TAG, "Audio focus request granted for VOICE_CALL streams");
} else {
Log.e(TAG, "Audio focus request failed");
}
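    // Context note (added commentary, not part of the original file): on API 26+
    // the non-deprecated alternative is AudioManager.requestAudioFocus(AudioFocusRequest),
    // which is what the deprecation TODO on this method refers to.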
// Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
// required to be in this mode when playout and/or recording starts for
// best possible VoIP performance.
audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
// Always disable microphone mute during a WebRTC call.
setMicrophoneMute(false);
// Set initial device states.
userSelectedAudioDevice = AudioDevice.NONE;
selectedAudioDevice = AudioDevice.NONE;
audioDevices.clear();
// Initialize and start Bluetooth if a BT device is available or initiate
// detection of new (enabled) BT devices.
bluetoothManager.start();
// Do initial selection of audio device. This setting can later be changed
// either by adding/removing a BT or wired headset or by covering/uncovering
// the proximity sensor.
updateAudioDeviceState();
// Register receiver for broadcast intents related to adding/removing a
// wired headset.
registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
Log.d(TAG, "AudioManager started");
}
@SuppressWarnings("deprecation") // TODO(henrika): audioManager.abandonAudioFocus() is deprecated.
public void stop() {
Log.d(TAG, "stop");
ThreadUtils.checkIsOnMainThread();
if (amState != AudioManagerState.RUNNING) {
Log.e(TAG, "Trying to stop AudioManager in incorrect state: " + amState);
return;
}
amState = AudioManagerState.UNINITIALIZED;
unregisterReceiver(wiredHeadsetReceiver);
bluetoothManager.stop();
// Restore previously stored audio states.
setSpeakerphoneOn(savedIsSpeakerPhoneOn);
setMicrophoneMute(savedIsMicrophoneMute);
audioManager.setMode(savedAudioMode);
// Abandon audio focus. Gives the previous focus owner, if any, focus.
audioManager.abandonAudioFocus(audioFocusChangeListener);
audioFocusChangeListener = null;
Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams");
if (proximitySensor != null) {
proximitySensor.stop();
proximitySensor = null;
}
audioManagerEvents = null;
Log.d(TAG, "AudioManager stopped");
}
/** Changes selection of the currently active audio device. */
private void setAudioDeviceInternal(AudioDevice device) {
Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")");
AppRTCUtils.assertIsTrue(audioDevices.contains(device));
switch (device) {
case SPEAKER_PHONE:
setSpeakerphoneOn(true);
break;
case EARPIECE:
setSpeakerphoneOn(false);
break;
case WIRED_HEADSET:
setSpeakerphoneOn(false);
break;
case BLUETOOTH:
setSpeakerphoneOn(false);
break;
default:
Log.e(TAG, "Invalid audio device selection");
break;
}
selectedAudioDevice = device;
}
/**
* Changes default audio device.
* TODO(henrika): add usage of this method in the AppRTCMobile client.
*/
public void setDefaultAudioDevice(AudioDevice defaultDevice) {
ThreadUtils.checkIsOnMainThread();
switch (defaultDevice) {
case SPEAKER_PHONE:
defaultAudioDevice = defaultDevice;
break;
case EARPIECE:
if (hasEarpiece()) {
defaultAudioDevice = defaultDevice;
} else {
defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
}
break;
default:
Log.e(TAG, "Invalid default audio device selection");
break;
}
Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")");
updateAudioDeviceState();
}
/** Changes selection of the currently active audio device. */
public void selectAudioDevice(AudioDevice device) {
ThreadUtils.checkIsOnMainThread();
if (!audioDevices.contains(device)) {
Log.e(TAG, "Can not select " + device + " from available " + audioDevices);
}
userSelectedAudioDevice = device;
updateAudioDeviceState();
}
/** Returns current set of available/selectable audio devices. */
public Set<AudioDevice> getAudioDevices() {
ThreadUtils.checkIsOnMainThread();
return Collections.unmodifiableSet(new HashSet<>(audioDevices));
}
/** Returns the currently selected audio device. */
public AudioDevice getSelectedAudioDevice() {
ThreadUtils.checkIsOnMainThread();
return selectedAudioDevice;
}
/** Helper method for receiver registration. */
private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
apprtcContext.registerReceiver(receiver, filter);
}
/** Helper method for unregistration of an existing receiver. */
private void unregisterReceiver(BroadcastReceiver receiver) {
apprtcContext.unregisterReceiver(receiver);
}
/** Sets the speaker phone mode. */
private void setSpeakerphoneOn(boolean on) {
boolean wasOn = audioManager.isSpeakerphoneOn();
if (wasOn == on) {
return;
}
audioManager.setSpeakerphoneOn(on);
}
/** Sets the microphone mute state. */
private void setMicrophoneMute(boolean on) {
boolean wasMuted = audioManager.isMicrophoneMute();
if (wasMuted == on) {
return;
}
audioManager.setMicrophoneMute(on);
}
  /** Checks whether the device has an earpiece, using telephony support as a proxy. */
private boolean hasEarpiece() {
return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
}
/**
* Checks whether a wired headset is connected or not.
* This is not a valid indication that audio playback is actually over
* the wired headset as audio routing depends on other conditions. We
* only use it as an early indicator (during initialization) of an attached
* wired headset.
*/
@Deprecated
private boolean hasWiredHeadset() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
return audioManager.isWiredHeadsetOn();
} else {
final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
for (AudioDeviceInfo device : devices) {
final int type = device.getType();
if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
Log.d(TAG, "hasWiredHeadset: found wired headset");
return true;
} else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
Log.d(TAG, "hasWiredHeadset: found USB audio device");
return true;
}
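        // Context note (added commentary, not part of the original file): any USB
        // audio device is treated as a wired headset here; API 26 later added the
        // more specific AudioDeviceInfo.TYPE_USB_HEADSET type.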
}
return false;
}
}
/**
 * Updates the list of possible audio devices and makes a new device selection.
* TODO(henrika): add unit test to verify all state transitions.
*/
public void updateAudioDeviceState() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "--- updateAudioDeviceState: "
+ "wired headset=" + hasWiredHeadset + ", "
+ "BT state=" + bluetoothManager.getState());
Log.d(TAG, "Device status: "
+ "available=" + audioDevices + ", "
+ "selected=" + selectedAudioDevice + ", "
+ "user selected=" + userSelectedAudioDevice);
// Check if any Bluetooth headset is connected. The internal BT state will
// change accordingly.
// TODO(henrika): perhaps wrap required state into BT manager.
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_DISCONNECTING) {
bluetoothManager.updateDevice();
}
// Update the set of available audio devices.
Set<AudioDevice> newAudioDevices = new HashSet<>();
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE) {
newAudioDevices.add(AudioDevice.BLUETOOTH);
}
if (hasWiredHeadset) {
// If a wired headset is connected, then it is the only possible option.
newAudioDevices.add(AudioDevice.WIRED_HEADSET);
} else {
// No wired headset, hence the audio-device list can contain speaker
// phone (on a tablet), or speaker phone and earpiece (on mobile phone).
newAudioDevices.add(AudioDevice.SPEAKER_PHONE);
if (hasEarpiece()) {
newAudioDevices.add(AudioDevice.EARPIECE);
}
}
// Store state which is set to true if the device list has changed.
boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices);
// Update the existing audio device set.
audioDevices = newAudioDevices;
// Correct user selected audio devices if needed.
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
&& userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
// If BT is not available, it can't be the user selection.
userSelectedAudioDevice = AudioDevice.NONE;
}
if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) {
      // If the user selected speaker phone but then plugged in a wired headset,
      // make the wired headset the user-selected device.
userSelectedAudioDevice = AudioDevice.WIRED_HEADSET;
}
if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
      // If the user selected the wired headset but then unplugged it, make
      // speaker phone the user-selected device.
userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE;
}
// Need to start Bluetooth if it is available and user either selected it explicitly or
// user did not select any output device.
boolean needBluetoothAudioStart =
bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
&& (userSelectedAudioDevice == AudioDevice.NONE
|| userSelectedAudioDevice == AudioDevice.BLUETOOTH);
// Need to stop Bluetooth audio if user selected different device and
// Bluetooth SCO connection is established or in the process.
boolean needBluetoothAudioStop =
(bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING)
&& (userSelectedAudioDevice != AudioDevice.NONE
&& userSelectedAudioDevice != AudioDevice.BLUETOOTH);
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
Log.d(TAG, "Need BT audio: start=" + needBluetoothAudioStart + ", "
+ "stop=" + needBluetoothAudioStop + ", "
+ "BT state=" + bluetoothManager.getState());
}
// Start or stop Bluetooth SCO connection given states set earlier.
if (needBluetoothAudioStop) {
bluetoothManager.stopScoAudio();
bluetoothManager.updateDevice();
}
if (needBluetoothAudioStart && !needBluetoothAudioStop) {
      // Attempt to start Bluetooth SCO audio (takes a few seconds to start).
if (!bluetoothManager.startScoAudio()) {
// Remove BLUETOOTH from list of available devices since SCO failed.
audioDevices.remove(AudioDevice.BLUETOOTH);
audioDeviceSetUpdated = true;
}
}
// Update selected audio device.
final AudioDevice newAudioDevice;
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
      // If Bluetooth is connected, then it should be used as the output audio
// device. Note that it is not sufficient that a headset is available;
// an active SCO channel must also be up and running.
newAudioDevice = AudioDevice.BLUETOOTH;
} else if (hasWiredHeadset) {
// If a wired headset is connected, but Bluetooth is not, then wired headset is used as
// audio device.
newAudioDevice = AudioDevice.WIRED_HEADSET;
} else {
// No wired headset and no Bluetooth, hence the audio-device list can contain speaker
// phone (on a tablet), or speaker phone and earpiece (on mobile phone).
// |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
// depending on the user's selection.
newAudioDevice = defaultAudioDevice;
}
    // Switch to the new device, but only if there have been any changes.
if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
// Do the required device switch.
setAudioDeviceInternal(newAudioDevice);
Log.d(TAG, "New device status: "
+ "available=" + audioDevices + ", "
+ "selected=" + newAudioDevice);
if (audioManagerEvents != null) {
// Notify a listening client that audio device has been changed.
audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices);
}
}
Log.d(TAG, "--- updateAudioDeviceState done");
}
}

View file

@ -0,0 +1,532 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.annotation.SuppressLint;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothHeadset;
import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.media.AudioManager;
import android.os.Handler;
import android.os.Looper;
import android.os.Process;
import android.support.annotation.Nullable;
import android.util.Log;
import java.util.List;
import java.util.Set;
import org.appspot.apprtc.util.AppRTCUtils;
import org.webrtc.ThreadUtils;
/**
 * AppRTCBluetoothManager manages functions related to Bluetooth devices in the
 * AppRTC demo.
*/
public class AppRTCBluetoothManager {
private static final String TAG = "AppRTCBluetoothManager";
// Timeout interval for starting or stopping audio to a Bluetooth SCO device.
private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000;
// Maximum number of SCO connection attempts.
private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2;
// Bluetooth connection state.
public enum State {
// Bluetooth is not available; no adapter or Bluetooth is off.
UNINITIALIZED,
// Bluetooth error happened when trying to start Bluetooth.
ERROR,
// Bluetooth proxy object for the Headset profile exists, but no connected headset devices,
// SCO is not started or disconnected.
HEADSET_UNAVAILABLE,
// Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset
// present, but SCO is not started or disconnected.
HEADSET_AVAILABLE,
// Bluetooth audio SCO connection with remote device is closing.
SCO_DISCONNECTING,
// Bluetooth audio SCO connection with remote device is initiated.
SCO_CONNECTING,
// Bluetooth audio SCO connection with remote device is established.
SCO_CONNECTED
}
private final Context apprtcContext;
private final AppRTCAudioManager apprtcAudioManager;
@Nullable
private final AudioManager audioManager;
private final Handler handler;
int scoConnectionAttempts;
private State bluetoothState;
private final BluetoothProfile.ServiceListener bluetoothServiceListener;
@Nullable
private BluetoothAdapter bluetoothAdapter;
@Nullable
private BluetoothHeadset bluetoothHeadset;
@Nullable
private BluetoothDevice bluetoothDevice;
private final BroadcastReceiver bluetoothHeadsetReceiver;
// Runs when the Bluetooth timeout expires. We use that timeout after calling
// startScoAudio() or stopScoAudio() because we're not guaranteed to get a
// callback after those calls.
private final Runnable bluetoothTimeoutRunnable = new Runnable() {
@Override
public void run() {
bluetoothTimeout();
}
};
/**
* Implementation of an interface that notifies BluetoothProfile IPC clients when they have been
* connected to or disconnected from the service.
*/
private class BluetoothServiceListener implements BluetoothProfile.ServiceListener {
@Override
// Called to notify the client when the proxy object has been connected to the service.
// Once we have the profile proxy object, we can use it to monitor the state of the
// connection and perform other operations that are relevant to the headset profile.
public void onServiceConnected(int profile, BluetoothProfile proxy) {
if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
return;
}
Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState);
// Android only supports one connected Bluetooth Headset at a time.
bluetoothHeadset = (BluetoothHeadset) proxy;
updateAudioDeviceState();
Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState);
}
@Override
/** Notifies the client when the proxy object has been disconnected from the service. */
public void onServiceDisconnected(int profile) {
if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
return;
}
Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState);
stopScoAudio();
bluetoothHeadset = null;
bluetoothDevice = null;
bluetoothState = State.HEADSET_UNAVAILABLE;
updateAudioDeviceState();
Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState);
}
}
// Intent broadcast receiver which handles changes in Bluetooth device availability.
// Detects headset changes and Bluetooth SCO state changes.
private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (bluetoothState == State.UNINITIALIZED) {
return;
}
final String action = intent.getAction();
// Change in connection state of the Headset profile. Note that the
// change does not tell us anything about whether we're streaming
// audio to BT over SCO. Typically received when user turns on a BT
// headset while audio is active using another audio device.
if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
final int state =
intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED);
Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
+ "a=ACTION_CONNECTION_STATE_CHANGED, "
+ "s=" + stateToString(state) + ", "
+ "sb=" + isInitialStickyBroadcast() + ", "
+ "BT state: " + bluetoothState);
if (state == BluetoothHeadset.STATE_CONNECTED) {
scoConnectionAttempts = 0;
updateAudioDeviceState();
} else if (state == BluetoothHeadset.STATE_CONNECTING) {
// No action needed.
} else if (state == BluetoothHeadset.STATE_DISCONNECTING) {
// No action needed.
} else if (state == BluetoothHeadset.STATE_DISCONNECTED) {
// Bluetooth is probably powered off during the call.
stopScoAudio();
updateAudioDeviceState();
}
// Change in the audio (SCO) connection state of the Headset profile.
// Typically received after call to startScoAudio() has finalized.
} else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
final int state = intent.getIntExtra(
BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
+ "a=ACTION_AUDIO_STATE_CHANGED, "
+ "s=" + stateToString(state) + ", "
+ "sb=" + isInitialStickyBroadcast() + ", "
+ "BT state: " + bluetoothState);
if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
cancelTimer();
if (bluetoothState == State.SCO_CONNECTING) {
Log.d(TAG, "+++ Bluetooth audio SCO is now connected");
bluetoothState = State.SCO_CONNECTED;
scoConnectionAttempts = 0;
updateAudioDeviceState();
} else {
Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED");
}
} else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) {
Log.d(TAG, "+++ Bluetooth audio SCO is now connecting...");
} else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected");
if (isInitialStickyBroadcast()) {
Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast.");
return;
}
updateAudioDeviceState();
}
}
Log.d(TAG, "onReceive done: BT state=" + bluetoothState);
}
}
/** Construction. */
static AppRTCBluetoothManager create(Context context, AppRTCAudioManager audioManager) {
Log.d(TAG, "create" + AppRTCUtils.getThreadInfo());
return new AppRTCBluetoothManager(context, audioManager);
}
protected AppRTCBluetoothManager(Context context, AppRTCAudioManager audioManager) {
Log.d(TAG, "ctor");
ThreadUtils.checkIsOnMainThread();
apprtcContext = context;
apprtcAudioManager = audioManager;
this.audioManager = getAudioManager(context);
bluetoothState = State.UNINITIALIZED;
bluetoothServiceListener = new BluetoothServiceListener();
bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver();
handler = new Handler(Looper.getMainLooper());
}
/** Returns the internal state. */
public State getState() {
ThreadUtils.checkIsOnMainThread();
return bluetoothState;
}
/**
* Activates components required to detect Bluetooth devices and to enable
 * BT SCO (audio is routed via BT SCO) for the headset profile. The end
 * state will be HEADSET_UNAVAILABLE, but a state machine has been started
 * and will drive a state change sequence whose final outcome depends on
 * if/when the BT headset is enabled.
* Example of state change sequence when start() is called while BT device
* is connected and enabled:
* UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE -->
* SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO.
* Note that the AppRTCAudioManager is also involved in driving this state
* change.
*/
public void start() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "start");
if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) {
Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission");
return;
}
if (bluetoothState != State.UNINITIALIZED) {
Log.w(TAG, "Invalid BT state");
return;
}
bluetoothHeadset = null;
bluetoothDevice = null;
scoConnectionAttempts = 0;
// Get a handle to the default local Bluetooth adapter.
bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (bluetoothAdapter == null) {
Log.w(TAG, "Device does not support Bluetooth");
return;
}
// Ensure that the device supports use of BT SCO audio for off call use cases.
if (!audioManager.isBluetoothScoAvailableOffCall()) {
Log.e(TAG, "Bluetooth SCO audio is not available off call");
return;
}
logBluetoothAdapterInfo(bluetoothAdapter);
// Establish a connection to the HEADSET profile (includes both Bluetooth Headset and
// Hands-Free) proxy object and install a listener.
if (!getBluetoothProfileProxy(
apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) {
Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed");
return;
}
// Register receivers for BluetoothHeadset change notifications.
IntentFilter bluetoothHeadsetFilter = new IntentFilter();
// Register receiver for change in connection state of the Headset profile.
bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
// Register receiver for change in audio connection state of the Headset profile.
bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter);
Log.d(TAG, "HEADSET profile state: "
+ stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET)));
Log.d(TAG, "Bluetooth proxy for headset profile has started");
bluetoothState = State.HEADSET_UNAVAILABLE;
Log.d(TAG, "start done: BT state=" + bluetoothState);
}
/** Stops and closes all components related to Bluetooth audio. */
public void stop() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "stop: BT state=" + bluetoothState);
if (bluetoothAdapter == null) {
return;
}
// Stop BT SCO connection with remote device if needed.
stopScoAudio();
// Close down remaining BT resources.
if (bluetoothState == State.UNINITIALIZED) {
return;
}
unregisterReceiver(bluetoothHeadsetReceiver);
cancelTimer();
if (bluetoothHeadset != null) {
bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset);
bluetoothHeadset = null;
}
bluetoothAdapter = null;
bluetoothDevice = null;
bluetoothState = State.UNINITIALIZED;
Log.d(TAG, "stop done: BT state=" + bluetoothState);
}
/**
* Starts Bluetooth SCO connection with remote device.
* Note that the phone application always has the priority on the usage of the SCO connection
* for telephony. If this method is called while the phone is in call it will be ignored.
* Similarly, if a call is received or sent while an application is using the SCO connection,
* the connection will be lost for the application and NOT returned automatically when the call
 * ends. Also note that up to and including API version JELLY_BEAN_MR1, this method initiates a
 * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO
 * audio connection is established.
 * TODO(henrika): should we add support for a virtual voice call to the BT headset also for JBMR2
 * and higher? It might be required to initiate a virtual voice call since many devices do not
 * accept SCO audio without a "call".
*/
public boolean startScoAudio() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "startSco: BT state=" + bluetoothState + ", "
+ "attempts: " + scoConnectionAttempts + ", "
+ "SCO is on: " + isScoOn());
if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) {
Log.e(TAG, "BT SCO connection fails - no more attempts");
return false;
}
if (bluetoothState != State.HEADSET_AVAILABLE) {
Log.e(TAG, "BT SCO connection fails - no headset available");
return false;
}
// Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED.
Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED...");
// The SCO connection establishment can take several seconds, hence we cannot rely on the
// connection to be available when the method returns but instead register to receive the
// intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED.
bluetoothState = State.SCO_CONNECTING;
audioManager.startBluetoothSco();
audioManager.setBluetoothScoOn(true);
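    // Context note (added commentary, not part of the original file):
    // startBluetoothSco() and setBluetoothScoOn() were deprecated in API 31 in
    // favor of AudioManager.setCommunicationDevice(AudioDeviceInfo).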
scoConnectionAttempts++;
startTimer();
Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", "
+ "SCO is on: " + isScoOn());
return true;
}
/** Stops Bluetooth SCO connection with remote device. */
public void stopScoAudio() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", "
+ "SCO is on: " + isScoOn());
if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
return;
}
cancelTimer();
audioManager.stopBluetoothSco();
audioManager.setBluetoothScoOn(false);
bluetoothState = State.SCO_DISCONNECTING;
Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", "
+ "SCO is on: " + isScoOn());
}
/**
* Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
* Service via IPC) to update the list of connected devices for the HEADSET
* profile. The internal state will change to HEADSET_UNAVAILABLE or to
* HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected
* device if available.
*/
public void updateDevice() {
if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
return;
}
Log.d(TAG, "updateDevice");
// Get connected devices for the headset profile. Returns the set of
// devices which are in state STATE_CONNECTED. The BluetoothDevice class
// is just a thin wrapper for a Bluetooth hardware address.
List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
if (devices.isEmpty()) {
bluetoothDevice = null;
bluetoothState = State.HEADSET_UNAVAILABLE;
Log.d(TAG, "No connected bluetooth headset");
} else {
// Always use first device in list. Android only supports one device.
bluetoothDevice = devices.get(0);
bluetoothState = State.HEADSET_AVAILABLE;
Log.d(TAG, "Connected bluetooth headset: "
+ "name=" + bluetoothDevice.getName() + ", "
+ "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice))
+ ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice));
}
Log.d(TAG, "updateDevice done: BT state=" + bluetoothState);
}
/**
* Stubs for test mocks.
*/
@Nullable
protected AudioManager getAudioManager(Context context) {
return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
apprtcContext.registerReceiver(receiver, filter);
}
protected void unregisterReceiver(BroadcastReceiver receiver) {
apprtcContext.unregisterReceiver(receiver);
}
protected boolean getBluetoothProfileProxy(
Context context, BluetoothProfile.ServiceListener listener, int profile) {
return bluetoothAdapter.getProfileProxy(context, listener, profile);
}
protected boolean hasPermission(Context context, String permission) {
return apprtcContext.checkPermission(permission, Process.myPid(), Process.myUid())
== PackageManager.PERMISSION_GRANTED;
}
/** Logs the state of the local Bluetooth adapter. */
@SuppressLint("HardwareIds")
protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
Log.d(TAG, "BluetoothAdapter: "
+ "enabled=" + localAdapter.isEnabled() + ", "
+ "state=" + stateToString(localAdapter.getState()) + ", "
+ "name=" + localAdapter.getName() + ", "
+ "address=" + localAdapter.getAddress());
// Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter.
Set<BluetoothDevice> pairedDevices = localAdapter.getBondedDevices();
if (!pairedDevices.isEmpty()) {
Log.d(TAG, "paired devices:");
for (BluetoothDevice device : pairedDevices) {
Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress());
}
}
}
/** Ensures that the audio manager updates its list of available audio devices. */
private void updateAudioDeviceState() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "updateAudioDeviceState");
apprtcAudioManager.updateAudioDeviceState();
}
/** Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds. */
private void startTimer() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "startTimer");
handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
}
/** Cancels any outstanding timer tasks. */
private void cancelTimer() {
ThreadUtils.checkIsOnMainThread();
Log.d(TAG, "cancelTimer");
handler.removeCallbacks(bluetoothTimeoutRunnable);
}
/**
* Called when starting the BT SCO channel takes too long. Usually
* happens when the BT device has been turned on during an ongoing call.
*/
private void bluetoothTimeout() {
ThreadUtils.checkIsOnMainThread();
if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
return;
}
Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", "
+ "attempts: " + scoConnectionAttempts + ", "
+ "SCO is on: " + isScoOn());
if (bluetoothState != State.SCO_CONNECTING) {
return;
}
// Bluetooth SCO should be connecting; check the latest result.
boolean scoConnected = false;
List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
if (devices.size() > 0) {
bluetoothDevice = devices.get(0);
if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) {
Log.d(TAG, "SCO connected with " + bluetoothDevice.getName());
scoConnected = true;
} else {
Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName());
}
}
if (scoConnected) {
// We thought BT had timed out, but it's actually on; updating state.
bluetoothState = State.SCO_CONNECTED;
scoConnectionAttempts = 0;
} else {
// Give up and "cancel" our request by calling stopBluetoothSco().
Log.w(TAG, "BT failed to connect after timeout");
stopScoAudio();
}
updateAudioDeviceState();
Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState);
}
/** Checks whether audio uses Bluetooth SCO. */
private boolean isScoOn() {
return audioManager.isBluetoothScoOn();
}
/** Converts BluetoothAdapter states into local string representations. */
private String stateToString(int state) {
switch (state) {
case BluetoothAdapter.STATE_DISCONNECTED:
return "DISCONNECTED";
case BluetoothAdapter.STATE_CONNECTED:
return "CONNECTED";
case BluetoothAdapter.STATE_CONNECTING:
return "CONNECTING";
case BluetoothAdapter.STATE_DISCONNECTING:
return "DISCONNECTING";
case BluetoothAdapter.STATE_OFF:
return "OFF";
case BluetoothAdapter.STATE_ON:
return "ON";
case BluetoothAdapter.STATE_TURNING_OFF:
// Indicates the local Bluetooth adapter is turning off. Local clients should immediately
// attempt graceful disconnection of any remote links.
return "TURNING_OFF";
case BluetoothAdapter.STATE_TURNING_ON:
// Indicates the local Bluetooth adapter is turning on. However local clients should wait
// for STATE_ON before attempting to use the adapter.
return "TURNING_ON";
default:
return "INVALID";
}
}
}

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
@@ -0,0 +1,137 @@
/*
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import org.webrtc.IceCandidate;
import org.webrtc.PeerConnection;
import org.webrtc.SessionDescription;
import java.util.List;
/**
* AppRTCClient is the interface representing an AppRTC client.
*/
public interface AppRTCClient {
/**
* Struct holding the connection parameters of an AppRTC room.
*/
class RoomConnectionParameters {
public final String roomUrl;
public final String roomId;
public final boolean loopback;
public final String urlParameters;
public RoomConnectionParameters(
String roomUrl, String roomId, boolean loopback, String urlParameters) {
this.roomUrl = roomUrl;
this.roomId = roomId;
this.loopback = loopback;
this.urlParameters = urlParameters;
}
public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
this(roomUrl, roomId, loopback, null /* urlParameters */);
}
}
/**
* Asynchronously connect to an AppRTC room URL using supplied connection
* parameters. Once connection is established onConnectedToRoom()
* callback with room parameters is invoked.
*/
void connectToRoom(RoomConnectionParameters connectionParameters);
/**
* Send offer SDP to the other participant.
*/
void sendOfferSdp(final SessionDescription sdp);
/**
* Send answer SDP to the other participant.
*/
void sendAnswerSdp(final SessionDescription sdp);
/**
* Send Ice candidate to the other participant.
*/
void sendLocalIceCandidate(final IceCandidate candidate);
/**
* Send removed ICE candidates to the other participant.
*/
void sendLocalIceCandidateRemovals(final IceCandidate[] candidates);
/**
* Disconnect from room.
*/
void disconnectFromRoom();
/**
* Struct holding the signaling parameters of an AppRTC room.
*/
class SignalingParameters {
public final List<PeerConnection.IceServer> iceServers;
public final boolean initiator;
public final String clientId;
public final String wssUrl;
public final String wssPostUrl;
public final SessionDescription offerSdp;
public final List<IceCandidate> iceCandidates;
public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
List<IceCandidate> iceCandidates) {
this.iceServers = iceServers;
this.initiator = initiator;
this.clientId = clientId;
this.wssUrl = wssUrl;
this.wssPostUrl = wssPostUrl;
this.offerSdp = offerSdp;
this.iceCandidates = iceCandidates;
}
}
/**
* Callback interface for messages delivered on signaling channel.
*
* <p>Methods are guaranteed to be invoked on the UI thread of |activity|.
*/
interface SignalingEvents {
/**
* Callback fired once the room's signaling parameters
* SignalingParameters are extracted.
*/
void onConnectedToRoom(final SignalingParameters params);
/**
* Callback fired once remote SDP is received.
*/
void onRemoteDescription(final SessionDescription sdp);
/**
* Callback fired once remote Ice candidate is received.
*/
void onRemoteIceCandidate(final IceCandidate candidate);
/**
* Callback fired once remote Ice candidate removals are received.
*/
void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates);
/**
* Callback fired once channel is closed.
*/
void onChannelClose();
/**
* Callback fired once channel error happened.
*/
void onChannelError(final String description);
}
}
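// Usage sketch (illustrative, not part of this interface): a caller that
// implements SignalingEvents drives a concrete client (e.g. the
// WebSocketRTCClient used by CallActivity) roughly like this:
//
//   AppRTCClient client = new WebSocketRTCClient(this /* SignalingEvents */);
//   client.connectToRoom(new AppRTCClient.RoomConnectionParameters(
//       "https://appr.tc", "my-room", false /* loopback */));
//   // After onConnectedToRoom() fires and a local SDP has been created:
//   client.sendOfferSdp(localSdp);            // initiator side
//   client.sendLocalIceCandidate(candidate);  // as candidates are gathered
//   client.disconnectFromRoom();              // on hang-up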

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
@@ -0,0 +1,164 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Build;
import android.support.annotation.Nullable;
import android.util.Log;
import org.appspot.apprtc.util.AppRTCUtils;
import org.webrtc.ThreadUtils;
/**
* AppRTCProximitySensor manages functions related to the proximity sensor in
* the AppRTC demo.
* On most devices, the proximity sensor is implemented as a boolean sensor.
* It returns just two values, "NEAR" or "FAR". Thresholding is done on the LUX
* value, i.e. the LUX value of the light sensor is compared with a threshold.
* A LUX value above the threshold means the proximity sensor returns "FAR";
* anything below the threshold means it returns "NEAR".
*/
public class AppRTCProximitySensor implements SensorEventListener {
private static final String TAG = "AppRTCProximitySensor";
// This class should be created, started and stopped on one thread
// (e.g. the main thread). We use |threadChecker| to ensure that this is
// the case.
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private final Runnable onSensorStateListener;
private final SensorManager sensorManager;
@Nullable private Sensor proximitySensor;
private boolean lastStateReportIsNear;
/** Construction */
static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
return new AppRTCProximitySensor(context, sensorStateListener);
}
private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
onSensorStateListener = sensorStateListener;
sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
}
/**
* Activate the proximity sensor. Also do initialization if called for the
* first time.
*/
public boolean start() {
threadChecker.checkIsOnValidThread();
Log.d(TAG, "start" + AppRTCUtils.getThreadInfo());
if (!initDefaultSensor()) {
// Proximity sensor is not supported on this device.
return false;
}
sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
return true;
}
/** Deactivate the proximity sensor. */
public void stop() {
threadChecker.checkIsOnValidThread();
Log.d(TAG, "stop" + AppRTCUtils.getThreadInfo());
if (proximitySensor == null) {
return;
}
sensorManager.unregisterListener(this, proximitySensor);
}
/** Getter for last reported state. Set to true if "near" is reported. */
public boolean sensorReportsNearState() {
threadChecker.checkIsOnValidThread();
return lastStateReportIsNear;
}
@Override
public final void onAccuracyChanged(Sensor sensor, int accuracy) {
threadChecker.checkIsOnValidThread();
AppRTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY);
if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
Log.e(TAG, "The values returned by this sensor cannot be trusted");
}
}
@Override
public final void onSensorChanged(SensorEvent event) {
threadChecker.checkIsOnValidThread();
AppRTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY);
// As a best practice, do as little as possible within this method and
// avoid blocking.
float distanceInCentimeters = event.values[0];
if (distanceInCentimeters < proximitySensor.getMaximumRange()) {
Log.d(TAG, "Proximity sensor => NEAR state");
lastStateReportIsNear = true;
} else {
Log.d(TAG, "Proximity sensor => FAR state");
lastStateReportIsNear = false;
}
// Report about new state to listening client. Client can then call
// sensorReportsNearState() to query the current state (NEAR or FAR).
if (onSensorStateListener != null) {
onSensorStateListener.run();
}
Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
+ "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+ event.values[0]);
}
/**
* Get the default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
* do not support this type of sensor, and false will be returned in such
* cases.
*/
private boolean initDefaultSensor() {
if (proximitySensor != null) {
return true;
}
proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
if (proximitySensor == null) {
return false;
}
logProximitySensorInfo();
return true;
}
/** Helper method for logging information about the proximity sensor. */
private void logProximitySensorInfo() {
if (proximitySensor == null) {
return;
}
StringBuilder info = new StringBuilder("Proximity sensor: ");
info.append("name=").append(proximitySensor.getName());
info.append(", vendor: ").append(proximitySensor.getVendor());
info.append(", power: ").append(proximitySensor.getPower());
info.append(", resolution: ").append(proximitySensor.getResolution());
info.append(", max range: ").append(proximitySensor.getMaximumRange());
info.append(", min delay: ").append(proximitySensor.getMinDelay());
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
// Added in API level 20.
info.append(", type: ").append(proximitySensor.getStringType());
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// Added in API level 21.
info.append(", max delay: ").append(proximitySensor.getMaxDelay());
info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
}
Log.d(TAG, info.toString());
}
}
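// Usage sketch (illustrative): the owner keeps the sensor in a field so the
// state-change callback can query it, e.g.:
//
//   proximitySensor = AppRTCProximitySensor.create(context, () -> {
//     if (proximitySensor.sensorReportsNearState()) {
//       // Device is held against the ear: e.g. switch audio to the earpiece.
//     }
//   });
//   proximitySensor.start();
//   ...
//   proximitySensor.stop();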

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -0,0 +1,969 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.FragmentTransaction;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.Nullable;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.widget.Toast;
import java.io.IOException;
import java.lang.RuntimeException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.appspot.apprtc.AppRTCAudioManager.AudioDevice;
import org.appspot.apprtc.AppRTCAudioManager.AudioManagerEvents;
import org.appspot.apprtc.AppRTCClient.RoomConnectionParameters;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.PeerConnectionClient.DataChannelParameters;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.FileVideoCapturer;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon.ScalingType;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;
/**
* Activity for peer connection call setup, call waiting
* and call view.
*/
public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
PeerConnectionClient.PeerConnectionEvents,
CallFragment.OnCallEvents {
private static final String TAG = "CallRTCClient";
public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
public static final String EXTRA_URLPARAMETERS = "org.appspot.apprtc.URLPARAMETERS";
public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
public static final String EXTRA_SCREENCAPTURE = "org.appspot.apprtc.SCREENCAPTURE";
public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
"org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_FLEXFEC_ENABLED = "org.appspot.apprtc.FLEXFEC";
public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
"org.appspot.apprtc.NOAUDIOPROCESSING";
public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
public static final String EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED =
"org.appspot.apprtc.SAVE_INPUT_AUDIO_TO_FILE";
public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
public static final String EXTRA_DISABLE_WEBRTC_AGC_AND_HPF =
"org.appspot.apprtc.DISABLE_WEBRTC_GAIN_CONTROL";
public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
"org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
"org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
"org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
public static final String EXTRA_USE_VALUES_FROM_INTENT =
"org.appspot.apprtc.USE_VALUES_FROM_INTENT";
public static final String EXTRA_DATA_CHANNEL_ENABLED = "org.appspot.apprtc.DATA_CHANNEL_ENABLED";
public static final String EXTRA_ORDERED = "org.appspot.apprtc.ORDERED";
public static final String EXTRA_MAX_RETRANSMITS_MS = "org.appspot.apprtc.MAX_RETRANSMITS_MS";
public static final String EXTRA_MAX_RETRANSMITS = "org.appspot.apprtc.MAX_RETRANSMITS";
public static final String EXTRA_PROTOCOL = "org.appspot.apprtc.PROTOCOL";
public static final String EXTRA_NEGOTIATED = "org.appspot.apprtc.NEGOTIATED";
public static final String EXTRA_ID = "org.appspot.apprtc.ID";
public static final String EXTRA_ENABLE_RTCEVENTLOG = "org.appspot.apprtc.ENABLE_RTCEVENTLOG";
private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
// List of mandatory application permissions.
private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
"android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
// Peer connection statistics callback period in ms.
private static final int STAT_CALLBACK_PERIOD = 1000;
private static class ProxyVideoSink implements VideoSink {
private VideoSink target;
@Override
public synchronized void onFrame(VideoFrame frame) {
if (target == null) {
Logging.d(TAG, "Dropping frame in proxy because target is null.");
return;
}
target.onFrame(frame);
}
public synchronized void setTarget(VideoSink target) {
this.target = target;
}
}
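// Design note: ProxyVideoSink adds a level of indirection between a WebRTC
// video source and the on-screen renderers so that the render target can be
// swapped at runtime (see setSwappedFeeds() below) without re-creating the
// PeerConnection or renegotiating.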
private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
@Nullable private PeerConnectionClient peerConnectionClient;
@Nullable
private AppRTCClient appRtcClient;
@Nullable
private SignalingParameters signalingParameters;
@Nullable private AppRTCAudioManager audioManager;
@Nullable
private SurfaceViewRenderer pipRenderer;
@Nullable
private SurfaceViewRenderer fullscreenRenderer;
@Nullable
private VideoFileRenderer videoFileRenderer;
private final List<VideoSink> remoteSinks = new ArrayList<>();
private Toast logToast;
private boolean commandLineRun;
private boolean activityRunning;
private RoomConnectionParameters roomConnectionParameters;
@Nullable
private PeerConnectionParameters peerConnectionParameters;
private boolean connected;
private boolean isError;
private boolean callControlFragmentVisible = true;
private long callStartedTimeMs;
private boolean micEnabled = true;
private boolean screencaptureEnabled;
private static Intent mediaProjectionPermissionResultData;
private static int mediaProjectionPermissionResultCode;
// True if local view is in the fullscreen renderer.
private boolean isSwappedFeeds;
// Controls
private CallFragment callFragment;
private HudFragment hudFragment;
private CpuMonitor cpuMonitor;
@Override
// TODO(bugs.webrtc.org/8580): LayoutParams.FLAG_TURN_SCREEN_ON and
// LayoutParams.FLAG_SHOW_WHEN_LOCKED are deprecated.
@SuppressWarnings("deprecation")
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
// Set window styles for fullscreen-window size. Needs to be done before
// adding content.
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
| LayoutParams.FLAG_SHOW_WHEN_LOCKED | LayoutParams.FLAG_TURN_SCREEN_ON);
getWindow().getDecorView().setSystemUiVisibility(getSystemUiVisibility());
setContentView(R.layout.activity_call);
connected = false;
signalingParameters = null;
// Create UI controls.
pipRenderer = findViewById(R.id.pip_video_view);
fullscreenRenderer = findViewById(R.id.fullscreen_video_view);
callFragment = new CallFragment();
hudFragment = new HudFragment();
// Show/hide call control fragment on view click.
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
toggleCallControlFragmentVisibility();
}
};
// Swap feeds on pip view click.
pipRenderer.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
setSwappedFeeds(!isSwappedFeeds);
}
});
fullscreenRenderer.setOnClickListener(listener);
remoteSinks.add(remoteProxyRenderer);
final Intent intent = getIntent();
final EglBase eglBase = EglBase.create();
// Create video renderers.
pipRenderer.init(eglBase.getEglBaseContext(), null);
pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
// When saveRemoteVideoToFile is set, we save the video from the remote party to a file.
if (saveRemoteVideoToFile != null) {
int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
try {
videoFileRenderer = new VideoFileRenderer(
saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
remoteSinks.add(videoFileRenderer);
} catch (IOException e) {
throw new RuntimeException(
"Failed to open video file for output: " + saveRemoteVideoToFile, e);
}
}
fullscreenRenderer.init(eglBase.getEglBaseContext(), null);
fullscreenRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL);
pipRenderer.setZOrderMediaOverlay(true);
pipRenderer.setEnableHardwareScaler(true /* enabled */);
fullscreenRenderer.setEnableHardwareScaler(false /* enabled */);
// Start with local feed in fullscreen and swap it to the pip when the call is connected.
setSwappedFeeds(true /* isSwappedFeeds */);
// Check for mandatory permissions.
for (String permission : MANDATORY_PERMISSIONS) {
if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
logAndToast("Permission " + permission + " is not granted");
setResult(RESULT_CANCELED);
finish();
return;
}
}
Uri roomUri = intent.getData();
if (roomUri == null) {
logAndToast(getString(R.string.missing_url));
Log.e(TAG, "Didn't get any URL in intent!");
setResult(RESULT_CANCELED);
finish();
return;
}
// Get Intent parameters.
String roomId = intent.getStringExtra(EXTRA_ROOMID);
Log.d(TAG, "Room ID: " + roomId);
if (roomId == null || roomId.length() == 0) {
logAndToast(getString(R.string.missing_url));
Log.e(TAG, "Incorrect room ID in intent!");
setResult(RESULT_CANCELED);
finish();
return;
}
boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
int videoWidth = intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0);
int videoHeight = intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0);
screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
// If the capture format is not specified for screen capture, use the screen resolution.
if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
DisplayMetrics displayMetrics = getDisplayMetrics();
videoWidth = displayMetrics.widthPixels;
videoHeight = displayMetrics.heightPixels;
}
DataChannelParameters dataChannelParameters = null;
if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, false)) {
dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
}
peerConnectionParameters =
new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
intent.getBooleanExtra(EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, false),
intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
intent.getBooleanExtra(EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false),
intent.getBooleanExtra(EXTRA_ENABLE_RTCEVENTLOG, false), dataChannelParameters);
commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
int runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");
// Create connection client. Use DirectRTCClient if the room name is an IP address;
// otherwise use the standard WebSocketRTCClient.
if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
appRtcClient = new WebSocketRTCClient(this);
} else {
Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
appRtcClient = new DirectRTCClient(this);
}
// Create connection parameters.
String urlParameters = intent.getStringExtra(EXTRA_URLPARAMETERS);
roomConnectionParameters =
new RoomConnectionParameters(roomUri.toString(), roomId, loopback, urlParameters);
// Create CPU monitor
if (CpuMonitor.isSupported()) {
cpuMonitor = new CpuMonitor(this);
hudFragment.setCpuMonitor(cpuMonitor);
}
// Send intent arguments to fragments.
callFragment.setArguments(intent.getExtras());
hudFragment.setArguments(intent.getExtras());
// Activate call and HUD fragments and start the call.
FragmentTransaction ft = getFragmentManager().beginTransaction();
ft.add(R.id.call_fragment_container, callFragment);
ft.add(R.id.hud_fragment_container, hudFragment);
ft.commit();
// For command line execution, run the connection for <runTimeMs> and exit.
if (commandLineRun && runTimeMs > 0) {
(new Handler()).postDelayed(new Runnable() {
@Override
public void run() {
disconnect();
}
}, runTimeMs);
}
// Create peer connection client.
peerConnectionClient = new PeerConnectionClient(
getApplicationContext(), eglBase, peerConnectionParameters, CallActivity.this);
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
if (loopback) {
options.networkIgnoreMask = 0;
}
peerConnectionClient.createPeerConnectionFactory(options);
if (screencaptureEnabled) {
startScreenCapture();
} else {
startCall();
}
}
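// Example (illustrative, using extras defined above): launching this
// activity with a data channel enabled, from ConnectActivity-style code:
//
//   Intent intent = new Intent(context, CallActivity.class);
//   intent.setData(Uri.parse("https://appr.tc"));
//   intent.putExtra(CallActivity.EXTRA_ROOMID, "my-room");
//   intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, true);
//   intent.putExtra(CallActivity.EXTRA_ORDERED, true);
//   activity.startActivityForResult(intent, CONNECTION_REQUEST);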
@TargetApi(17)
private DisplayMetrics getDisplayMetrics() {
DisplayMetrics displayMetrics = new DisplayMetrics();
WindowManager windowManager =
(WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
return displayMetrics;
}
@TargetApi(19)
private static int getSystemUiVisibility() {
int flags = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
flags |= View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
}
return flags;
}
@TargetApi(21)
private void startScreenCapture() {
MediaProjectionManager mediaProjectionManager =
(MediaProjectionManager) getApplication().getSystemService(
Context.MEDIA_PROJECTION_SERVICE);
startActivityForResult(
mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE)
return;
mediaProjectionPermissionResultCode = resultCode;
mediaProjectionPermissionResultData = data;
startCall();
}
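// Note: the screen-capture permission flow spans three steps:
// startScreenCapture() launches the system MediaProjection consent dialog,
// the result is stashed above in the static mediaProjectionPermissionResult*
// fields, and createScreenCapturer() consumes them once startCall() builds
// the video capturer.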
private boolean useCamera2() {
return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
}
private boolean captureToTexture() {
return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
}
private @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
final String[] deviceNames = enumerator.getDeviceNames();
// First, try to find front facing camera
Logging.d(TAG, "Looking for front facing cameras.");
for (String deviceName : deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
Logging.d(TAG, "Creating front facing camera capturer.");
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
if (videoCapturer != null) {
return videoCapturer;
}
}
}
// Front facing camera not found, try something else
Logging.d(TAG, "Looking for other cameras.");
for (String deviceName : deviceNames) {
if (!enumerator.isFrontFacing(deviceName)) {
Logging.d(TAG, "Creating other camera capturer.");
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
if (videoCapturer != null) {
return videoCapturer;
}
}
}
return null;
}
@TargetApi(21)
private @Nullable VideoCapturer createScreenCapturer() {
if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
reportError("User didn't give permission to capture the screen.");
return null;
}
return new ScreenCapturerAndroid(
mediaProjectionPermissionResultData, new MediaProjection.Callback() {
@Override
public void onStop() {
reportError("User revoked permission to capture the screen.");
}
});
}
// Activity interfaces
@Override
public void onStop() {
super.onStop();
activityRunning = false;
// Don't stop the video when using screen capture, to allow the user to show other apps to the
// remote end.
if (peerConnectionClient != null && !screencaptureEnabled) {
peerConnectionClient.stopVideoSource();
}
if (cpuMonitor != null) {
cpuMonitor.pause();
}
}
@Override
public void onStart() {
super.onStart();
activityRunning = true;
// Video is not paused for screencapture. See onPause.
if (peerConnectionClient != null && !screencaptureEnabled) {
peerConnectionClient.startVideoSource();
}
if (cpuMonitor != null) {
cpuMonitor.resume();
}
}
@Override
protected void onDestroy() {
Thread.setDefaultUncaughtExceptionHandler(null);
disconnect();
if (logToast != null) {
logToast.cancel();
}
activityRunning = false;
super.onDestroy();
}
// CallFragment.OnCallEvents interface implementation.
@Override
public void onCallHangUp() {
disconnect();
}
@Override
public void onCameraSwitch() {
if (peerConnectionClient != null) {
peerConnectionClient.switchCamera();
}
}
@Override
public void onVideoScalingSwitch(ScalingType scalingType) {
fullscreenRenderer.setScalingType(scalingType);
}
@Override
public void onCaptureFormatChange(int width, int height, int framerate) {
if (peerConnectionClient != null) {
peerConnectionClient.changeCaptureFormat(width, height, framerate);
}
}
@Override
public boolean onToggleMic() {
if (peerConnectionClient != null) {
micEnabled = !micEnabled;
peerConnectionClient.setAudioEnabled(micEnabled);
}
return micEnabled;
}
// Helper functions.
private void toggleCallControlFragmentVisibility() {
if (!connected || !callFragment.isAdded()) {
return;
}
// Show/hide call control fragment
callControlFragmentVisible = !callControlFragmentVisible;
FragmentTransaction ft = getFragmentManager().beginTransaction();
if (callControlFragmentVisible) {
ft.show(callFragment);
ft.show(hudFragment);
} else {
ft.hide(callFragment);
ft.hide(hudFragment);
}
ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
ft.commit();
}
private void startCall() {
if (appRtcClient == null) {
Log.e(TAG, "AppRTC client is not allocated for a call.");
return;
}
callStartedTimeMs = System.currentTimeMillis();
// Start room connection.
logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
appRtcClient.connectToRoom(roomConnectionParameters);
// Create an audio manager that will take care of audio routing,
// audio modes, audio device enumeration etc.
audioManager = AppRTCAudioManager.create(getApplicationContext());
// Store existing audio settings and change audio mode to
// MODE_IN_COMMUNICATION for best possible VoIP performance.
Log.d(TAG, "Starting the audio manager...");
audioManager.start(new AudioManagerEvents() {
// This method will be called each time the number of available audio
// devices has changed.
@Override
public void onAudioDeviceChanged(
AudioDevice audioDevice, Set<AudioDevice> availableAudioDevices) {
onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
}
});
}
// Should be called from UI thread
private void callConnected() {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
Log.i(TAG, "Call connected: delay=" + delta + "ms");
if (peerConnectionClient == null || isError) {
Log.w(TAG, "Call is connected in closed or error state");
return;
}
// Enable statistics callback.
peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
setSwappedFeeds(false /* isSwappedFeeds */);
}
// This method is called when the audio manager reports audio device change,
// e.g. from wired headset to speakerphone.
private void onAudioManagerDevicesChanged(
final AudioDevice device, final Set<AudioDevice> availableDevices) {
Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
+ "selected: " + device);
// TODO(henrika): add callback handler.
}
// Disconnect from remote resources, dispose of local resources, and exit.
private void disconnect() {
activityRunning = false;
remoteProxyRenderer.setTarget(null);
localProxyVideoSink.setTarget(null);
if (appRtcClient != null) {
appRtcClient.disconnectFromRoom();
appRtcClient = null;
}
if (pipRenderer != null) {
pipRenderer.release();
pipRenderer = null;
}
if (videoFileRenderer != null) {
videoFileRenderer.release();
videoFileRenderer = null;
}
if (fullscreenRenderer != null) {
fullscreenRenderer.release();
fullscreenRenderer = null;
}
if (peerConnectionClient != null) {
peerConnectionClient.close();
peerConnectionClient = null;
}
if (audioManager != null) {
audioManager.stop();
audioManager = null;
}
if (connected && !isError) {
setResult(RESULT_OK);
} else {
setResult(RESULT_CANCELED);
}
finish();
}
private void disconnectWithErrorMessage(final String errorMessage) {
if (commandLineRun || !activityRunning) {
Log.e(TAG, "Critical error: " + errorMessage);
disconnect();
} else {
new AlertDialog.Builder(this)
.setTitle(getText(R.string.channel_error_title))
.setMessage(errorMessage)
.setCancelable(false)
.setNeutralButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
disconnect();
}
})
.create()
.show();
}
}
// Log |msg| and Toast about it.
private void logAndToast(String msg) {
Log.d(TAG, msg);
if (logToast != null) {
logToast.cancel();
}
logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
logToast.show();
}
private void reportError(final String description) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (!isError) {
isError = true;
disconnectWithErrorMessage(description);
}
}
});
}
private @Nullable VideoCapturer createVideoCapturer() {
final VideoCapturer videoCapturer;
String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
if (videoFileAsCamera != null) {
try {
videoCapturer = new FileVideoCapturer(videoFileAsCamera);
} catch (IOException e) {
reportError("Failed to open video file for emulated camera");
return null;
}
} else if (screencaptureEnabled) {
return createScreenCapturer();
} else if (useCamera2()) {
if (!captureToTexture()) {
reportError(getString(R.string.camera2_texture_only_error));
return null;
}
Logging.d(TAG, "Creating capturer using camera2 API.");
videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
} else {
Logging.d(TAG, "Creating capturer using camera1 API.");
videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
}
if (videoCapturer == null) {
reportError("Failed to open camera");
return null;
}
return videoCapturer;
}
private void setSwappedFeeds(boolean isSwappedFeeds) {
Logging.d(TAG, "setSwappedFeeds: " + isSwappedFeeds);
this.isSwappedFeeds = isSwappedFeeds;
localProxyVideoSink.setTarget(isSwappedFeeds ? fullscreenRenderer : pipRenderer);
remoteProxyRenderer.setTarget(isSwappedFeeds ? pipRenderer : fullscreenRenderer);
fullscreenRenderer.setMirror(isSwappedFeeds);
pipRenderer.setMirror(!isSwappedFeeds);
}
// -----Implementation of AppRTCClient.SignalingEvents ----------------------
// All callbacks are invoked from websocket signaling looper thread and
// are routed to UI thread.
private void onConnectedToRoomInternal(final SignalingParameters params) {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
signalingParameters = params;
logAndToast("Creating peer connection, delay=" + delta + "ms");
VideoCapturer videoCapturer = null;
if (peerConnectionParameters.videoCallEnabled) {
videoCapturer = createVideoCapturer();
}
peerConnectionClient.createPeerConnection(
localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);
if (signalingParameters.initiator) {
logAndToast("Creating OFFER...");
// Create offer. Offer SDP will be sent to answering client in
// PeerConnectionEvents.onLocalDescription event.
peerConnectionClient.createOffer();
} else {
if (params.offerSdp != null) {
peerConnectionClient.setRemoteDescription(params.offerSdp);
logAndToast("Creating ANSWER...");
// Create answer. Answer SDP will be sent to offering client in
// PeerConnectionEvents.onLocalDescription event.
peerConnectionClient.createAnswer();
}
if (params.iceCandidates != null) {
// Add remote ICE candidates from room.
for (IceCandidate iceCandidate : params.iceCandidates) {
peerConnectionClient.addRemoteIceCandidate(iceCandidate);
}
}
}
}
@Override
public void onConnectedToRoom(final SignalingParameters params) {
runOnUiThread(new Runnable() {
@Override
public void run() {
onConnectedToRoomInternal(params);
}
});
}
@Override
public void onRemoteDescription(final SessionDescription desc) {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (peerConnectionClient == null) {
Log.e(TAG, "Received remote SDP for non-initilized peer connection.");
return;
}
logAndToast("Received remote " + desc.type + ", delay=" + delta + "ms");
peerConnectionClient.setRemoteDescription(desc);
if (!signalingParameters.initiator) {
logAndToast("Creating ANSWER...");
// Create answer. Answer SDP will be sent to offering client in
// PeerConnectionEvents.onLocalDescription event.
peerConnectionClient.createAnswer();
}
}
});
}
@Override
public void onRemoteIceCandidate(final IceCandidate candidate) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (peerConnectionClient == null) {
Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
return;
}
peerConnectionClient.addRemoteIceCandidate(candidate);
}
});
}
@Override
public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (peerConnectionClient == null) {
Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
return;
}
peerConnectionClient.removeRemoteIceCandidates(candidates);
}
});
}
@Override
public void onChannelClose() {
runOnUiThread(new Runnable() {
@Override
public void run() {
logAndToast("Remote end hung up; dropping PeerConnection");
disconnect();
}
});
}
@Override
public void onChannelError(final String description) {
reportError(description);
}
// -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
// Send local peer connection SDP and ICE candidates to remote party.
// All callbacks are invoked from peer connection client looper thread and
// are routed to UI thread.
@Override
public void onLocalDescription(final SessionDescription desc) {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (appRtcClient != null) {
logAndToast("Sending " + desc.type + ", delay=" + delta + "ms");
if (signalingParameters.initiator) {
appRtcClient.sendOfferSdp(desc);
} else {
appRtcClient.sendAnswerSdp(desc);
}
}
if (peerConnectionParameters.videoMaxBitrate > 0) {
Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
}
}
});
}
@Override
public void onIceCandidate(final IceCandidate candidate) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (appRtcClient != null) {
appRtcClient.sendLocalIceCandidate(candidate);
}
}
});
}
@Override
public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (appRtcClient != null) {
appRtcClient.sendLocalIceCandidateRemovals(candidates);
}
}
});
}
@Override
public void onIceConnected() {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
runOnUiThread(new Runnable() {
@Override
public void run() {
logAndToast("ICE connected, delay=" + delta + "ms");
}
});
}
@Override
public void onIceDisconnected() {
runOnUiThread(new Runnable() {
@Override
public void run() {
logAndToast("ICE disconnected");
}
});
}
@Override
public void onConnected() {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
runOnUiThread(new Runnable() {
@Override
public void run() {
logAndToast("DTLS connected, delay=" + delta + "ms");
connected = true;
callConnected();
}
});
}
@Override
public void onDisconnected() {
runOnUiThread(new Runnable() {
@Override
public void run() {
logAndToast("DTLS disconnected");
connected = false;
disconnect();
}
});
}
@Override
public void onPeerConnectionClosed() {}
@Override
public void onPeerConnectionStatsReady(final StatsReport[] reports) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (!isError && connected) {
hudFragment.updateEncoderStatistics(reports);
}
}
});
}
@Override
public void onPeerConnectionError(final String description) {
reportError(description);
}
}

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
@@ -0,0 +1,137 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.SeekBar;
import android.widget.TextView;
import org.webrtc.RendererCommon.ScalingType;
/**
* Fragment for call control.
*/
public class CallFragment extends Fragment {
private TextView contactView;
private ImageButton cameraSwitchButton;
private ImageButton videoScalingButton;
private ImageButton toggleMuteButton;
private TextView captureFormatText;
private SeekBar captureFormatSlider;
private OnCallEvents callEvents;
private ScalingType scalingType;
private boolean videoCallEnabled = true;
/**
* Call control interface for container activity.
*/
public interface OnCallEvents {
void onCallHangUp();
void onCameraSwitch();
void onVideoScalingSwitch(ScalingType scalingType);
void onCaptureFormatChange(int width, int height, int framerate);
boolean onToggleMic();
}
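// Note: the hosting activity is expected to implement this interface
// (CallActivity does); onAttach() below caches it in |callEvents|.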
@Override
public View onCreateView(
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View controlView = inflater.inflate(R.layout.fragment_call, container, false);
// Create UI controls.
contactView = controlView.findViewById(R.id.contact_name_call);
ImageButton disconnectButton = controlView.findViewById(R.id.button_call_disconnect);
cameraSwitchButton = controlView.findViewById(R.id.button_call_switch_camera);
videoScalingButton = controlView.findViewById(R.id.button_call_scaling_mode);
toggleMuteButton = controlView.findViewById(R.id.button_call_toggle_mic);
captureFormatText = controlView.findViewById(R.id.capture_format_text_call);
captureFormatSlider = controlView.findViewById(R.id.capture_format_slider_call);
// Add buttons click events.
disconnectButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
callEvents.onCallHangUp();
}
});
cameraSwitchButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
callEvents.onCameraSwitch();
}
});
videoScalingButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
scalingType = ScalingType.SCALE_ASPECT_FIT;
} else {
videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
scalingType = ScalingType.SCALE_ASPECT_FILL;
}
callEvents.onVideoScalingSwitch(scalingType);
}
});
scalingType = ScalingType.SCALE_ASPECT_FILL;
toggleMuteButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
boolean enabled = callEvents.onToggleMic();
toggleMuteButton.setAlpha(enabled ? 1.0f : 0.3f);
}
});
return controlView;
}
@Override
public void onStart() {
super.onStart();
boolean captureSliderEnabled = false;
Bundle args = getArguments();
if (args != null) {
String contactName = args.getString(CallActivity.EXTRA_ROOMID);
contactView.setText(contactName);
videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
captureSliderEnabled = videoCallEnabled
&& args.getBoolean(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, false);
}
if (!videoCallEnabled) {
cameraSwitchButton.setVisibility(View.INVISIBLE);
}
if (captureSliderEnabled) {
captureFormatSlider.setOnSeekBarChangeListener(
new CaptureQualityController(captureFormatText, callEvents));
} else {
captureFormatText.setVisibility(View.GONE);
captureFormatSlider.setVisibility(View.GONE);
}
}
// TODO(sakal): Replace with onAttach(Context) once we only support API level 23+.
@SuppressWarnings("deprecation")
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
callEvents = (OnCallEvents) activity;
}
}

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
@@ -0,0 +1,110 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.widget.SeekBar;
import android.widget.TextView;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
/**
* Control capture format based on a seekbar listener.
*/
public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
private final List<CaptureFormat> formats =
Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
// Prioritize framerate below this threshold and resolution above the threshold.
private static final int FRAMERATE_THRESHOLD = 15;
private TextView captureFormatText;
private CallFragment.OnCallEvents callEvents;
private int width;
private int height;
private int framerate;
private double targetBandwidth;
public CaptureQualityController(
TextView captureFormatText, CallFragment.OnCallEvents callEvents) {
this.captureFormatText = captureFormatText;
this.callEvents = callEvents;
}
private final Comparator<CaptureFormat> compareFormats = new Comparator<CaptureFormat>() {
@Override
public int compare(CaptureFormat first, CaptureFormat second) {
int firstFps = calculateFramerate(targetBandwidth, first);
int secondFps = calculateFramerate(targetBandwidth, second);
if ((firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD)
|| firstFps == secondFps) {
// Compare resolution.
return first.width * first.height - second.width * second.height;
} else {
// Compare fps.
return firstFps - secondFps;
}
}
};
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if (progress == 0) {
width = 0;
height = 0;
framerate = 0;
captureFormatText.setText(R.string.muted);
return;
}
// Extract max bandwidth (in millipixels / second).
long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
for (CaptureFormat format : formats) {
maxCaptureBandwidth =
Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
}
// Fraction between 0 and 1.
double bandwidthFraction = (double) progress / 100.0;
// Make a log-scale transformation, still between 0 and 1.
final double kExpConstant = 3.0;
bandwidthFraction =
(Math.exp(kExpConstant * bandwidthFraction) - 1) / (Math.exp(kExpConstant) - 1);
targetBandwidth = bandwidthFraction * maxCaptureBandwidth;
// Choose the best format given a target bandwidth.
final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
width = bestFormat.width;
height = bestFormat.height;
framerate = calculateFramerate(targetBandwidth, bestFormat);
captureFormatText.setText(
String.format(captureFormatText.getContext().getString(R.string.format_description), width,
height, framerate));
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
callEvents.onCaptureFormatChange(width, height, framerate);
}
// Return the highest frame rate possible based on bandwidth and format.
private int calculateFramerate(double bandwidth, CaptureFormat format) {
return (int) Math.round(
Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
/ 1000.0);
}
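// Worked example (slider at 50%, for illustration): bandwidthFraction = 0.5
// maps through (e^(3*0.5) - 1) / (e^3 - 1) ≈ 0.18, and with
// maxCaptureBandwidth = 1280 * 720 * 30000 ≈ 2.76e10 millipixels/second this
// gives targetBandwidth ≈ 5.0e9. calculateFramerate() then yields ≈ 5 fps at
// 1280x720, ≈ 10 fps at 960x540 and ≈ 16 fps at 640x480. Only formats at or
// above FRAMERATE_THRESHOLD (15) compete on resolution, so Collections.max
// picks 640x480@16fps as the largest format that still reaches the threshold.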
}

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
@@ -0,0 +1,666 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.ContextMenu;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.webkit.URLUtil;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Random;
import org.json.JSONArray;
import org.json.JSONException;
/**
* Handles the initial setup where the user selects which room to join.
*/
public class ConnectActivity extends Activity {
private static final String TAG = "ConnectActivity";
private static final int CONNECTION_REQUEST = 1;
private static final int PERMISSION_REQUEST = 2;
private static final int REMOVE_FAVORITE_INDEX = 0;
private static boolean commandLineRun;
private ImageButton addFavoriteButton;
private EditText roomEditText;
private ListView roomListView;
private SharedPreferences sharedPref;
private String keyprefResolution;
private String keyprefFps;
private String keyprefVideoBitrateType;
private String keyprefVideoBitrateValue;
private String keyprefAudioBitrateType;
private String keyprefAudioBitrateValue;
private String keyprefRoomServerUrl;
private String keyprefRoom;
private String keyprefRoomList;
private ArrayList<String> roomList;
private ArrayAdapter<String> adapter;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Get setting keys.
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
keyprefResolution = getString(R.string.pref_resolution_key);
keyprefFps = getString(R.string.pref_fps_key);
keyprefVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
keyprefVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
keyprefRoom = getString(R.string.pref_room_key);
keyprefRoomList = getString(R.string.pref_room_list_key);
setContentView(R.layout.activity_connect);
roomEditText = findViewById(R.id.room_edittext);
roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
if (i == EditorInfo.IME_ACTION_DONE) {
addFavoriteButton.performClick();
return true;
}
return false;
}
});
roomEditText.requestFocus();
roomListView = findViewById(R.id.room_listview);
roomListView.setEmptyView(findViewById(android.R.id.empty));
roomListView.setOnItemClickListener(roomListClickListener);
registerForContextMenu(roomListView);
ImageButton connectButton = findViewById(R.id.connect_button);
connectButton.setOnClickListener(connectListener);
addFavoriteButton = findViewById(R.id.add_favorite_button);
addFavoriteButton.setOnClickListener(addFavoriteListener);
requestPermissions();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.connect_menu, menu);
return true;
}
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
if (v.getId() == R.id.room_listview) {
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo;
menu.setHeaderTitle(roomList.get(info.position));
String[] menuItems = getResources().getStringArray(R.array.roomListContextMenu);
for (int i = 0; i < menuItems.length; i++) {
menu.add(Menu.NONE, i, i, menuItems[i]);
}
} else {
super.onCreateContextMenu(menu, v, menuInfo);
}
}
@Override
public boolean onContextItemSelected(MenuItem item) {
if (item.getItemId() == REMOVE_FAVORITE_INDEX) {
AdapterView.AdapterContextMenuInfo info =
(AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
roomList.remove(info.position);
adapter.notifyDataSetChanged();
return true;
}
return super.onContextItemSelected(item);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle presses on the action bar items.
if (item.getItemId() == R.id.action_settings) {
Intent intent = new Intent(this, SettingsActivity.class);
startActivity(intent);
return true;
} else if (item.getItemId() == R.id.action_loopback) {
connectToRoom(null, false, true, false, 0);
return true;
} else {
return super.onOptionsItemSelected(item);
}
}
@Override
public void onPause() {
super.onPause();
String room = roomEditText.getText().toString();
String roomListJson = new JSONArray(roomList).toString();
SharedPreferences.Editor editor = sharedPref.edit();
editor.putString(keyprefRoom, room);
editor.putString(keyprefRoomList, roomListJson);
editor.commit();
}
@Override
public void onResume() {
super.onResume();
String room = sharedPref.getString(keyprefRoom, "");
roomEditText.setText(room);
roomList = new ArrayList<>();
String roomListJson = sharedPref.getString(keyprefRoomList, null);
if (roomListJson != null) {
try {
JSONArray jsonArray = new JSONArray(roomListJson);
for (int i = 0; i < jsonArray.length(); i++) {
roomList.add(jsonArray.get(i).toString());
}
} catch (JSONException e) {
Log.e(TAG, "Failed to load room list: " + e.toString());
}
}
adapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, roomList);
roomListView.setAdapter(adapter);
if (adapter.getCount() > 0) {
roomListView.requestFocus();
roomListView.setItemChecked(0, true);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == CONNECTION_REQUEST && commandLineRun) {
Log.d(TAG, "Return: " + resultCode);
setResult(resultCode);
commandLineRun = false;
finish();
}
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
if (requestCode == PERMISSION_REQUEST) {
String[] missingPermissions = getMissingPermissions();
if (missingPermissions.length != 0) {
// User didn't grant all the permissions. Warn that the application might not work
// correctly.
new AlertDialog.Builder(this)
.setMessage(R.string.missing_permissions_try_again)
.setPositiveButton(R.string.yes,
(dialog, id) -> {
// User wants to try giving the permissions again.
dialog.cancel();
requestPermissions();
})
.setNegativeButton(R.string.no,
(dialog, id) -> {
// User doesn't want to give the permissions.
dialog.cancel();
onPermissionsGranted();
})
.show();
} else {
// All permissions granted.
onPermissionsGranted();
}
}
}
private void onPermissionsGranted() {
// If an implicit VIEW intent is launching the app, go directly to that URL.
final Intent intent = getIntent();
if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
boolean useValuesFromIntent =
intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
String room = sharedPref.getString(keyprefRoom, "");
connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
}
}
@TargetApi(Build.VERSION_CODES.M)
private void requestPermissions() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
// Dynamic permissions are not required before Android M.
onPermissionsGranted();
return;
}
String[] missingPermissions = getMissingPermissions();
if (missingPermissions.length != 0) {
requestPermissions(missingPermissions, PERMISSION_REQUEST);
} else {
onPermissionsGranted();
}
}
@TargetApi(Build.VERSION_CODES.M)
private String[] getMissingPermissions() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
return new String[0];
}
PackageInfo info;
try {
info = getPackageManager().getPackageInfo(getPackageName(), PackageManager.GET_PERMISSIONS);
} catch (PackageManager.NameNotFoundException e) {
Log.w(TAG, "Failed to retrieve permissions.");
return new String[0];
}
if (info.requestedPermissions == null) {
Log.w(TAG, "No requested permissions.");
return new String[0];
}
ArrayList<String> missingPermissions = new ArrayList<>();
for (int i = 0; i < info.requestedPermissions.length; i++) {
if ((info.requestedPermissionsFlags[i] & PackageInfo.REQUESTED_PERMISSION_GRANTED) == 0) {
missingPermissions.add(info.requestedPermissions[i]);
}
}
Log.d(TAG, "Missing permissions: " + missingPermissions);
return missingPermissions.toArray(new String[missingPermissions.size()]);
}
/**
* Get a value from the shared preferences or from the intent; if it does
* not exist, the default is used.
*/
@Nullable
private String sharedPrefGetString(
int attributeId, String intentName, int defaultId, boolean useFromIntent) {
String defaultValue = getString(defaultId);
if (useFromIntent) {
String value = getIntent().getStringExtra(intentName);
if (value != null) {
return value;
}
return defaultValue;
} else {
String attributeName = getString(attributeId);
return sharedPref.getString(attributeName, defaultValue);
}
}
/**
* Get a value from the shared preferences or from the intent; if it does
* not exist, the default is used.
*/
private boolean sharedPrefGetBoolean(
int attributeId, String intentName, int defaultId, boolean useFromIntent) {
boolean defaultValue = Boolean.parseBoolean(getString(defaultId));
if (useFromIntent) {
return getIntent().getBooleanExtra(intentName, defaultValue);
} else {
String attributeName = getString(attributeId);
return sharedPref.getBoolean(attributeName, defaultValue);
}
}
/**
* Get a value from the shared preferences or from the intent; if it does
* not exist, the default is used.
*/
private int sharedPrefGetInteger(
int attributeId, String intentName, int defaultId, boolean useFromIntent) {
String defaultString = getString(defaultId);
int defaultValue = Integer.parseInt(defaultString);
if (useFromIntent) {
return getIntent().getIntExtra(intentName, defaultValue);
} else {
String attributeName = getString(attributeId);
String value = sharedPref.getString(attributeName, defaultString);
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
Log.e(TAG, "Wrong setting for: " + attributeName + ":" + value);
return defaultValue;
}
}
}
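// Illustrative example (the "VIDEO_FPS" extra name is hypothetical): with
// useFromIntent == true and an int extra "VIDEO_FPS" == 15 on the intent,
// sharedPrefGetInteger(R.string.pref_fps_key, "VIDEO_FPS",
// R.string.pref_fps_default, true) returns 15; with useFromIntent == false
// it instead parses the string stored under the pref_fps_key preference.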
@SuppressWarnings("StringSplitter")
private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
boolean useValuesFromIntent, int runTimeMs) {
ConnectActivity.commandLineRun = commandLineRun;
// roomId is random for loopback.
if (loopback) {
roomId = Integer.toString((new Random()).nextInt(100000000));
}
String roomUrl = sharedPref.getString(
keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
// Video call enabled flag.
boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
// Use screencapture option.
boolean useScreencapture = sharedPrefGetBoolean(R.string.pref_screencapture_key,
CallActivity.EXTRA_SCREENCAPTURE, R.string.pref_screencapture_default, useValuesFromIntent);
// Use Camera2 option.
boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
R.string.pref_camera2_default, useValuesFromIntent);
// Get default codecs.
String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
// Check HW codec flag.
boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
// Check Capture to texture.
boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
useValuesFromIntent);
// Check FlexFEC.
boolean flexfecEnabled = sharedPrefGetBoolean(R.string.pref_flexfec_key,
CallActivity.EXTRA_FLEXFEC_ENABLED, R.string.pref_flexfec_default, useValuesFromIntent);
// Check Disable Audio Processing flag.
boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
useValuesFromIntent);
boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
boolean saveInputAudioToFile =
sharedPrefGetBoolean(R.string.pref_enable_save_input_audio_to_file_key,
CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED,
R.string.pref_enable_save_input_audio_to_file_default, useValuesFromIntent);
// Check OpenSL ES enabled flag.
boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
// Check Disable built-in AEC flag.
boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
useValuesFromIntent);
// Check Disable built-in AGC flag.
boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
useValuesFromIntent);
// Check Disable built-in NS flag.
boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
useValuesFromIntent);
// Check Disable WebRTC AGC and high-pass filter flag.
boolean disableWebRtcAGCAndHPF = sharedPrefGetBoolean(
R.string.pref_disable_webrtc_agc_and_hpf_key, CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF,
R.string.pref_disable_webrtc_agc_and_hpf_key, useValuesFromIntent);
// Get video resolution from settings.
int videoWidth = 0;
int videoHeight = 0;
if (useValuesFromIntent) {
videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
}
if (videoWidth == 0 && videoHeight == 0) {
String resolution =
sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
String[] dimensions = resolution.split("[ x]+");
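// e.g. a stored resolution "1280 x 720" splits into {"1280", "720"}.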
if (dimensions.length == 2) {
try {
videoWidth = Integer.parseInt(dimensions[0]);
videoHeight = Integer.parseInt(dimensions[1]);
} catch (NumberFormatException e) {
videoWidth = 0;
videoHeight = 0;
Log.e(TAG, "Wrong video resolution setting: " + resolution);
}
}
}
// Get camera fps from settings.
int cameraFps = 0;
if (useValuesFromIntent) {
cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
}
if (cameraFps == 0) {
String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
String[] fpsValues = fps.split("[ x]+");
if (fpsValues.length == 2) {
try {
cameraFps = Integer.parseInt(fpsValues[0]);
} catch (NumberFormatException e) {
cameraFps = 0;
Log.e(TAG, "Wrong camera fps setting: " + fps);
}
}
}
// Check capture quality slider flag.
boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
R.string.pref_capturequalityslider_default, useValuesFromIntent);
// Get video and audio start bitrate.
int videoStartBitrate = 0;
if (useValuesFromIntent) {
videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
}
if (videoStartBitrate == 0) {
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
if (!bitrateType.equals(bitrateTypeDefault)) {
String bitrateValue = sharedPref.getString(
keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
videoStartBitrate = Integer.parseInt(bitrateValue);
}
}
int audioStartBitrate = 0;
if (useValuesFromIntent) {
audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
}
if (audioStartBitrate == 0) {
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
if (!bitrateType.equals(bitrateTypeDefault)) {
String bitrateValue = sharedPref.getString(
keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
audioStartBitrate = Integer.parseInt(bitrateValue);
}
}
// Check statistics display option.
boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
R.string.pref_tracing_default, useValuesFromIntent);
// Check Enable RtcEventLog.
boolean rtcEventLogEnabled = sharedPrefGetBoolean(R.string.pref_enable_rtceventlog_key,
CallActivity.EXTRA_ENABLE_RTCEVENTLOG, R.string.pref_enable_rtceventlog_default,
useValuesFromIntent);
// Get datachannel options
boolean dataChannelEnabled = sharedPrefGetBoolean(R.string.pref_enable_datachannel_key,
CallActivity.EXTRA_DATA_CHANNEL_ENABLED, R.string.pref_enable_datachannel_default,
useValuesFromIntent);
boolean ordered = sharedPrefGetBoolean(R.string.pref_ordered_key, CallActivity.EXTRA_ORDERED,
R.string.pref_ordered_default, useValuesFromIntent);
boolean negotiated = sharedPrefGetBoolean(R.string.pref_negotiated_key,
CallActivity.EXTRA_NEGOTIATED, R.string.pref_negotiated_default, useValuesFromIntent);
int maxRetrMs = sharedPrefGetInteger(R.string.pref_max_retransmit_time_ms_key,
CallActivity.EXTRA_MAX_RETRANSMITS_MS, R.string.pref_max_retransmit_time_ms_default,
useValuesFromIntent);
int maxRetr =
sharedPrefGetInteger(R.string.pref_max_retransmits_key, CallActivity.EXTRA_MAX_RETRANSMITS,
R.string.pref_max_retransmits_default, useValuesFromIntent);
int id = sharedPrefGetInteger(R.string.pref_data_id_key, CallActivity.EXTRA_ID,
R.string.pref_data_id_default, useValuesFromIntent);
String protocol = sharedPrefGetString(R.string.pref_data_protocol_key,
CallActivity.EXTRA_PROTOCOL, R.string.pref_data_protocol_default, useValuesFromIntent);
// Start AppRTCMobile activity.
Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
if (validateUrl(roomUrl)) {
Uri uri = Uri.parse(roomUrl);
Intent intent = new Intent(this, CallActivity.class);
intent.setData(uri);
intent.putExtra(CallActivity.EXTRA_ROOMID, roomId);
intent.putExtra(CallActivity.EXTRA_LOOPBACK, loopback);
intent.putExtra(CallActivity.EXTRA_VIDEO_CALL, videoCallEnabled);
intent.putExtra(CallActivity.EXTRA_SCREENCAPTURE, useScreencapture);
intent.putExtra(CallActivity.EXTRA_CAMERA2, useCamera2);
intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
intent.putExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, flexfecEnabled);
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
intent.putExtra(CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, saveInputAudioToFile);
intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, disableBuiltInAGC);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, disableBuiltInNS);
intent.putExtra(CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, disableWebRtcAGCAndHPF);
intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
intent.putExtra(CallActivity.EXTRA_ENABLE_RTCEVENTLOG, rtcEventLogEnabled);
intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, dataChannelEnabled);
if (dataChannelEnabled) {
intent.putExtra(CallActivity.EXTRA_ORDERED, ordered);
intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS_MS, maxRetrMs);
intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS, maxRetr);
intent.putExtra(CallActivity.EXTRA_PROTOCOL, protocol);
intent.putExtra(CallActivity.EXTRA_NEGOTIATED, negotiated);
intent.putExtra(CallActivity.EXTRA_ID, id);
}
if (useValuesFromIntent) {
if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
String videoFileAsCamera =
getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
}
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
String saveRemoteVideoToFile =
getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
}
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
int videoOutWidth =
getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
}
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
int videoOutHeight =
getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
}
}
startActivityForResult(intent, CONNECTION_REQUEST);
}
}
private boolean validateUrl(String url) {
if (URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url)) {
return true;
}
new AlertDialog.Builder(this)
.setTitle(getText(R.string.invalid_url_title))
.setMessage(getString(R.string.invalid_url_text, url))
.setCancelable(false)
.setNeutralButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
})
.create()
.show();
return false;
}
private final AdapterView.OnItemClickListener roomListClickListener =
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
String roomId = ((TextView) view).getText().toString();
connectToRoom(roomId, false, false, false, 0);
}
};
private final OnClickListener addFavoriteListener = new OnClickListener() {
@Override
public void onClick(View view) {
String newRoom = roomEditText.getText().toString();
if (newRoom.length() > 0 && !roomList.contains(newRoom)) {
adapter.add(newRoom);
adapter.notifyDataSetChanged();
}
}
};
private final OnClickListener connectListener = new OnClickListener() {
@Override
public void onClick(View view) {
connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
}
};
}


@ -0,0 +1,523 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.BatteryManager;
import android.os.Build;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.util.Log;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Scanner;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Simple CPU monitor. The caller creates a CpuMonitor object which can then
* be used via sampleCpuUtilization() to collect the percentage use of the
* cumulative CPU capacity for all CPUs running at their nominal frequency. Three
* values are exposed: (1) getCpuUsageCurrent() returns the use since the last
* sample, (2) getCpuUsageAverage() returns the use averaged over the last
* MOVING_AVERAGE_SAMPLES samples, and (3) getFrequencyScaleAverage() returns
* the average CPU frequency, relative to maximum, over the same window.
*
* <p>CPUs in Android are often "offline", and while this of course means 0 Hz
* as current frequency, in this state we cannot even get their nominal
* frequency. We therefore tread carefully, and allow any CPU to be missing.
* Missing CPUs are assumed to have the same nominal frequency as any close
* lower-numbered CPU, but as soon as one comes online, we read its proper
* frequency and remember it. (Since CPU 0 in practice always seems to be
* online, this unidirectional frequency inheritance should be no problem in
* practice.)
*
* <p>Caveats:
* o No provision made for zany "turbo" mode, common in the x86 world.
* o No provision made for ARM big.LITTLE; if CPU n can switch behind our
* back, we might get incorrect estimates.
* o This is not thread-safe. To call asynchronously, create different
* CpuMonitor objects.
*
* <p>If we can gather enough info to generate a sensible result,
* sampleCpuUtilization returns true. It is designed to never throw an
* exception.
*
* <p>sampleCpuUtilization should not be called too often in its present form,
* since the deltas would then be small and the percent values would fluctuate
* and be unreadable. If it is desirable to call it more often than, say, once
* per second, one would need to increase MOVING_AVERAGE_SAMPLES and probably
* use Queue<Integer> to avoid copying overhead.
*
* <p>Known problems:
* 1. Nexus 7 devices running KitKat have a kernel which often outputs an
* incorrect 'idle' field in /proc/stat. The value is close to twice the
* correct value, and then returns to the correct reading. Both when jumping
* up and when dropping back down we might create faulty CPU load readings.
*/
@TargetApi(Build.VERSION_CODES.KITKAT)
class CpuMonitor {
private static final String TAG = "CpuMonitor";
private static final int MOVING_AVERAGE_SAMPLES = 5;
private static final int CPU_STAT_SAMPLE_PERIOD_MS = 2000;
private static final int CPU_STAT_LOG_PERIOD_MS = 6000;
private final Context appContext;
// User CPU usage at current frequency.
private final MovingAverage userCpuUsage;
// System CPU usage at current frequency.
private final MovingAverage systemCpuUsage;
// Total CPU usage relative to maximum frequency.
private final MovingAverage totalCpuUsage;
// CPU frequency in percentage from maximum.
private final MovingAverage frequencyScale;
@Nullable
private ScheduledExecutorService executor;
private long lastStatLogTimeMs;
private long[] cpuFreqMax;
private int cpusPresent;
private int actualCpusPresent;
private boolean initialized;
private boolean cpuOveruse;
private String[] maxPath;
private String[] curPath;
private double[] curFreqScales;
@Nullable
private ProcStat lastProcStat;
private static class ProcStat {
final long userTime;
final long systemTime;
final long idleTime;
ProcStat(long userTime, long systemTime, long idleTime) {
this.userTime = userTime;
this.systemTime = systemTime;
this.idleTime = idleTime;
}
}
private static class MovingAverage {
private final int size;
private double sum;
private double currentValue;
private double[] circBuffer;
private int circBufferIndex;
public MovingAverage(int size) {
if (size <= 0) {
throw new AssertionError("Size value in MovingAverage ctor should be positive.");
}
this.size = size;
circBuffer = new double[size];
}
public void reset() {
Arrays.fill(circBuffer, 0);
circBufferIndex = 0;
sum = 0;
currentValue = 0;
}
public void addValue(double value) {
sum -= circBuffer[circBufferIndex];
circBuffer[circBufferIndex++] = value;
currentValue = value;
sum += value;
if (circBufferIndex >= size) {
circBufferIndex = 0;
}
}
public double getCurrent() {
return currentValue;
}
public double getAverage() {
return sum / (double) size;
}
}
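// For illustration: with size == 5, after addValue(1), ..., addValue(5),
// getCurrent() == 5 and getAverage() == 3.0. Until five values have been
// added, the zeros in the freshly allocated buffer still count towards the
// average.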
public static boolean isSupported() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
&& Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
}
public CpuMonitor(Context context) {
if (!isSupported()) {
throw new RuntimeException("CpuMonitor is not supported on this Android version.");
}
Log.d(TAG, "CpuMonitor ctor.");
appContext = context.getApplicationContext();
userCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
systemCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
totalCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
frequencyScale = new MovingAverage(MOVING_AVERAGE_SAMPLES);
lastStatLogTimeMs = SystemClock.elapsedRealtime();
scheduleCpuUtilizationTask();
}
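// Typical lifecycle from a host activity (illustrative sketch):
//   if (CpuMonitor.isSupported()) {
//     CpuMonitor cpuMonitor = new CpuMonitor(context);
//     // onPause(): cpuMonitor.pause();  onResume(): cpuMonitor.resume();
//     int cpuNow = cpuMonitor.getCpuUsageCurrent(); // percent of nominal capacity
//   }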
public void pause() {
if (executor != null) {
Log.d(TAG, "pause");
executor.shutdownNow();
executor = null;
}
}
public void resume() {
Log.d(TAG, "resume");
resetStat();
scheduleCpuUtilizationTask();
}
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void reset() {
if (executor != null) {
Log.d(TAG, "reset");
resetStat();
cpuOveruse = false;
}
}
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized int getCpuUsageCurrent() {
return doubleToPercent(userCpuUsage.getCurrent() + systemCpuUsage.getCurrent());
}
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized int getCpuUsageAverage() {
return doubleToPercent(userCpuUsage.getAverage() + systemCpuUsage.getAverage());
}
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized int getFrequencyScaleAverage() {
return doubleToPercent(frequencyScale.getAverage());
}
private void scheduleCpuUtilizationTask() {
if (executor != null) {
executor.shutdownNow();
executor = null;
}
executor = Executors.newSingleThreadScheduledExecutor();
@SuppressWarnings("unused") // Prevent downstream linter warnings.
Future<?> possiblyIgnoredError = executor.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
cpuUtilizationTask();
}
}, 0, CPU_STAT_SAMPLE_PERIOD_MS, TimeUnit.MILLISECONDS);
}
private void cpuUtilizationTask() {
boolean cpuMonitorAvailable = sampleCpuUtilization();
if (cpuMonitorAvailable
&& SystemClock.elapsedRealtime() - lastStatLogTimeMs >= CPU_STAT_LOG_PERIOD_MS) {
lastStatLogTimeMs = SystemClock.elapsedRealtime();
String statString = getStatString();
Log.d(TAG, statString);
}
}
private void init() {
try (FileInputStream fin = new FileInputStream("/sys/devices/system/cpu/present");
InputStreamReader streamReader = new InputStreamReader(fin, Charset.forName("UTF-8"));
BufferedReader reader = new BufferedReader(streamReader);
Scanner scanner = new Scanner(reader).useDelimiter("[-\n]");) {
scanner.nextInt(); // Skip leading number 0.
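// e.g. a "present" file containing "0-7" yields cpusPresent == 8 below.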
cpusPresent = 1 + scanner.nextInt();
} catch (FileNotFoundException e) {
Log.e(TAG, "Cannot do CPU stats since /sys/devices/system/cpu/present is missing");
} catch (IOException e) {
Log.e(TAG, "Error closing file");
} catch (Exception e) {
Log.e(TAG, "Cannot do CPU stats due to /sys/devices/system/cpu/present parsing problem");
}
cpuFreqMax = new long[cpusPresent];
maxPath = new String[cpusPresent];
curPath = new String[cpusPresent];
curFreqScales = new double[cpusPresent];
for (int i = 0; i < cpusPresent; i++) {
cpuFreqMax[i] = 0; // Frequency "not yet determined".
curFreqScales[i] = 0;
maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
}
lastProcStat = new ProcStat(0, 0, 0);
resetStat();
initialized = true;
}
private synchronized void resetStat() {
userCpuUsage.reset();
systemCpuUsage.reset();
totalCpuUsage.reset();
frequencyScale.reset();
lastStatLogTimeMs = SystemClock.elapsedRealtime();
}
private int getBatteryLevel() {
// Use sticky broadcast with null receiver to read battery level once only.
Intent intent = appContext.registerReceiver(
null /* receiver */, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));
int batteryLevel = 0;
int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
if (batteryScale > 0) {
batteryLevel =
(int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
}
return batteryLevel;
}
/**
* Re-measure CPU use. Call this method at an interval of around once per
* second. Returns true on success. On success, the moving averages for user,
* system and total CPU usage and for the frequency scale are updated; the
* synchronized getters above report them as percentages.
*/
private synchronized boolean sampleCpuUtilization() {
long lastSeenMaxFreq = 0;
long cpuFreqCurSum = 0;
long cpuFreqMaxSum = 0;
if (!initialized) {
init();
}
if (cpusPresent == 0) {
return false;
}
actualCpusPresent = 0;
for (int i = 0; i < cpusPresent; i++) {
/*
* For each CPU, attempt to first read its max frequency, then its
* current frequency. Once the max frequency for a CPU is found,
* save it in cpuFreqMax[].
*/
curFreqScales[i] = 0;
if (cpuFreqMax[i] == 0) {
// We have never found this CPU's max frequency. Attempt to read it.
long cpufreqMax = readFreqFromFile(maxPath[i]);
if (cpufreqMax > 0) {
Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
lastSeenMaxFreq = cpufreqMax;
cpuFreqMax[i] = cpufreqMax;
maxPath[i] = null; // Kill path to free its memory.
}
} else {
lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
}
long cpuFreqCur = readFreqFromFile(curPath[i]);
if (cpuFreqCur == 0 && lastSeenMaxFreq == 0) {
// No current frequency information for this CPU core - ignore it.
continue;
}
if (cpuFreqCur > 0) {
actualCpusPresent++;
}
cpuFreqCurSum += cpuFreqCur;
/* Here, lastSeenMaxFreq might come from
* 1. cpuFreqMax[i], or
* 2. a previous iteration, or
* 3. a newly read value, or
* 4. hypothetically from the pre-loop initialization to zero.
*/
cpuFreqMaxSum += lastSeenMaxFreq;
if (lastSeenMaxFreq > 0) {
curFreqScales[i] = (double) cpuFreqCur / lastSeenMaxFreq;
}
}
if (cpuFreqCurSum == 0 || cpuFreqMaxSum == 0) {
Log.e(TAG, "Could not read max or current frequency for any CPU");
return false;
}
/*
* Since the cycle counts are for the period between the last invocation
* and this present one, we average the percentage CPU frequencies between
* now and the beginning of the measurement period. This is significantly
* incorrect only if the frequencies have peaked or dropped in between the
* invocations.
*/
double currentFrequencyScale = cpuFreqCurSum / (double) cpuFreqMaxSum;
if (frequencyScale.getCurrent() > 0) {
currentFrequencyScale = (frequencyScale.getCurrent() + currentFrequencyScale) * 0.5;
}
ProcStat procStat = readProcStat();
if (procStat == null) {
return false;
}
long diffUserTime = procStat.userTime - lastProcStat.userTime;
long diffSystemTime = procStat.systemTime - lastProcStat.systemTime;
long diffIdleTime = procStat.idleTime - lastProcStat.idleTime;
long allTime = diffUserTime + diffSystemTime + diffIdleTime;
if (currentFrequencyScale == 0 || allTime == 0) {
return false;
}
// Update statistics.
frequencyScale.addValue(currentFrequencyScale);
double currentUserCpuUsage = diffUserTime / (double) allTime;
userCpuUsage.addValue(currentUserCpuUsage);
double currentSystemCpuUsage = diffSystemTime / (double) allTime;
systemCpuUsage.addValue(currentSystemCpuUsage);
double currentTotalCpuUsage =
(currentUserCpuUsage + currentSystemCpuUsage) * currentFrequencyScale;
totalCpuUsage.addValue(currentTotalCpuUsage);
// Save new measurements for next round's deltas.
lastProcStat = procStat;
return true;
}
private int doubleToPercent(double d) {
return (int) (d * 100 + 0.5);
}
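// e.g. doubleToPercent(0.427) == 43; the + 0.5 rounds to the nearest percent.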
private synchronized String getStatString() {
StringBuilder stat = new StringBuilder();
stat.append("CPU User: ")
.append(doubleToPercent(userCpuUsage.getCurrent()))
.append("/")
.append(doubleToPercent(userCpuUsage.getAverage()))
.append(". System: ")
.append(doubleToPercent(systemCpuUsage.getCurrent()))
.append("/")
.append(doubleToPercent(systemCpuUsage.getAverage()))
.append(". Freq: ")
.append(doubleToPercent(frequencyScale.getCurrent()))
.append("/")
.append(doubleToPercent(frequencyScale.getAverage()))
.append(". Total usage: ")
.append(doubleToPercent(totalCpuUsage.getCurrent()))
.append("/")
.append(doubleToPercent(totalCpuUsage.getAverage()))
.append(". Cores: ")
.append(actualCpusPresent);
stat.append("( ");
for (int i = 0; i < cpusPresent; i++) {
stat.append(doubleToPercent(curFreqScales[i])).append(" ");
}
stat.append("). Battery: ").append(getBatteryLevel());
if (cpuOveruse) {
stat.append(". Overuse.");
}
return stat.toString();
}
/**
* Read a single integer value from the named file. Return the read value
* or if an error occurs return 0.
*/
private long readFreqFromFile(String fileName) {
long number = 0;
try (FileInputStream stream = new FileInputStream(fileName);
InputStreamReader streamReader = new InputStreamReader(stream, Charset.forName("UTF-8"));
BufferedReader reader = new BufferedReader(streamReader)) {
String line = reader.readLine();
number = parseLong(line);
} catch (FileNotFoundException e) {
// CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
// is not present. This is not an error.
} catch (IOException e) {
// CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
// is empty. This is not an error.
}
return number;
}
private static long parseLong(String value) {
long number = 0;
try {
number = Long.parseLong(value);
} catch (NumberFormatException e) {
Log.e(TAG, "parseLong error.", e);
}
return number;
}
/*
* Read the current utilization of all CPUs using the cumulative first line
* of /proc/stat.
*/
@SuppressWarnings("StringSplitter")
private @Nullable ProcStat readProcStat() {
long userTime = 0;
long systemTime = 0;
long idleTime = 0;
try (FileInputStream stream = new FileInputStream("/proc/stat");
InputStreamReader streamReader = new InputStreamReader(stream, Charset.forName("UTF-8"));
BufferedReader reader = new BufferedReader(streamReader)) {
// line should contain something like this:
// cpu 5093818 271838 3512830 165934119 101374 447076 272086 0 0 0
// user nice system idle iowait irq softirq
String line = reader.readLine();
String[] lines = line.split("\\s+");
int length = lines.length;
if (length >= 5) {
userTime = parseLong(lines[1]); // user
userTime += parseLong(lines[2]); // nice
systemTime = parseLong(lines[3]); // system
idleTime = parseLong(lines[4]); // idle
}
if (length >= 8) {
userTime += parseLong(lines[5]); // iowait
systemTime += parseLong(lines[6]); // irq
systemTime += parseLong(lines[7]); // softirq
}
} catch (FileNotFoundException e) {
Log.e(TAG, "Cannot open /proc/stat for reading", e);
return null;
} catch (Exception e) {
Log.e(TAG, "Problems parsing /proc/stat", e);
return null;
}
return new ProcStat(userTime, systemTime, idleTime);
}
}


@ -0,0 +1,346 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.support.annotation.Nullable;
import android.util.Log;
import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.IceCandidate;
import org.webrtc.SessionDescription;
/**
* Implementation of AppRTCClient that uses direct TCP connection as the signaling channel.
* This eliminates the need for an external server. This class does not support loopback
* connections.
*/
public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChannelEvents {
private static final String TAG = "DirectRTCClient";
private static final int DEFAULT_PORT = 8888;
// Regex pattern used for checking if room id looks like an IP.
static final Pattern IP_PATTERN = Pattern.compile("("
// IPv4
+ "((\\d+\\.){3}\\d+)|"
// IPv6
+ "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
+ "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
+ "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
// IPv6 without []
+ "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
+ "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
// Literals
+ "localhost"
+ ")"
// Optional port number
+ "(:(\\d+))?");
private final ExecutorService executor;
private final SignalingEvents events;
@Nullable
private TCPChannelClient tcpClient;
private RoomConnectionParameters connectionParameters;
private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
// All alterations of the room state should be done from inside the executor thread.
private ConnectionState roomState;
public DirectRTCClient(SignalingEvents events) {
this.events = events;
executor = Executors.newSingleThreadExecutor();
roomState = ConnectionState.NEW;
}
/**
* Connects to the room. The roomId in connectionParameters is required and
* must be a valid IP address (or "localhost"), optionally with a port,
* matching IP_PATTERN.
*/
@Override
public void connectToRoom(RoomConnectionParameters connectionParameters) {
this.connectionParameters = connectionParameters;
if (connectionParameters.loopback) {
reportError("Loopback connections aren't supported by DirectRTCClient.");
}
executor.execute(new Runnable() {
@Override
public void run() {
connectToRoomInternal();
}
});
}
@Override
public void disconnectFromRoom() {
executor.execute(new Runnable() {
@Override
public void run() {
disconnectFromRoomInternal();
}
});
}
/**
* Connects to the room.
*
* Runs on the executor thread.
*/
private void connectToRoomInternal() {
this.roomState = ConnectionState.NEW;
String endpoint = connectionParameters.roomId;
Matcher matcher = IP_PATTERN.matcher(endpoint);
if (!matcher.matches()) {
reportError("roomId must match IP_PATTERN for DirectRTCClient.");
return;
}
String ip = matcher.group(1);
String portStr = matcher.group(matcher.groupCount());
int port;
if (portStr != null) {
try {
port = Integer.parseInt(portStr);
} catch (NumberFormatException e) {
reportError("Invalid port number: " + portStr);
return;
}
} else {
port = DEFAULT_PORT;
}
tcpClient = new TCPChannelClient(executor, this, ip, port);
}
/**
* Disconnects from the room.
*
* Runs on the executor thread.
*/
private void disconnectFromRoomInternal() {
roomState = ConnectionState.CLOSED;
if (tcpClient != null) {
tcpClient.disconnect();
tcpClient = null;
}
executor.shutdown();
}
@Override
public void sendOfferSdp(final SessionDescription sdp) {
executor.execute(new Runnable() {
@Override
public void run() {
if (roomState != ConnectionState.CONNECTED) {
reportError("Sending offer SDP in non connected state.");
return;
}
JSONObject json = new JSONObject();
jsonPut(json, "sdp", sdp.description);
jsonPut(json, "type", "offer");
sendMessage(json.toString());
}
});
}
@Override
public void sendAnswerSdp(final SessionDescription sdp) {
executor.execute(new Runnable() {
@Override
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "sdp", sdp.description);
jsonPut(json, "type", "answer");
sendMessage(json.toString());
}
});
}
@Override
public void sendLocalIceCandidate(final IceCandidate candidate) {
executor.execute(new Runnable() {
@Override
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "candidate");
jsonPut(json, "label", candidate.sdpMLineIndex);
jsonPut(json, "id", candidate.sdpMid);
jsonPut(json, "candidate", candidate.sdp);
if (roomState != ConnectionState.CONNECTED) {
reportError("Sending ICE candidate in non connected state.");
return;
}
sendMessage(json.toString());
}
});
}
/** Send removed Ice candidates to the other participant. */
@Override
public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
executor.execute(new Runnable() {
@Override
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "remove-candidates");
JSONArray jsonArray = new JSONArray();
for (final IceCandidate candidate : candidates) {
jsonArray.put(toJsonCandidate(candidate));
}
jsonPut(json, "candidates", jsonArray);
if (roomState != ConnectionState.CONNECTED) {
reportError("Sending ICE candidate removals in non connected state.");
return;
}
sendMessage(json.toString());
}
});
}
// -------------------------------------------------------------------
// TCPChannelClient event handlers
/**
* If the client is the server side, this will trigger onConnectedToRoom.
*/
@Override
public void onTCPConnected(boolean isServer) {
if (isServer) {
roomState = ConnectionState.CONNECTED;
SignalingParameters parameters = new SignalingParameters(
// Ice servers are not needed for direct connections.
new ArrayList<>(),
isServer, // Server side acts as the initiator on direct connections.
null, // clientId
null, // wssUrl
null, // wssPostUrl
null, // offerSdp
null // iceCandidates
);
events.onConnectedToRoom(parameters);
}
}
@Override
public void onTCPMessage(String msg) {
try {
JSONObject json = new JSONObject(msg);
String type = json.optString("type");
if (type.equals("candidate")) {
events.onRemoteIceCandidate(toJavaCandidate(json));
} else if (type.equals("remove-candidates")) {
JSONArray candidateArray = json.getJSONArray("candidates");
IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
for (int i = 0; i < candidateArray.length(); ++i) {
candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
}
events.onRemoteIceCandidatesRemoved(candidates);
} else if (type.equals("answer")) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else if (type.equals("offer")) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
SignalingParameters parameters = new SignalingParameters(
// Ice servers are not needed for direct connections.
new ArrayList<>(),
false, // This code will only be run on the client side. So, we are not the initiator.
null, // clientId
null, // wssUrl
null, // wssPostUrl
sdp, // offerSdp
null // iceCandidates
);
roomState = ConnectionState.CONNECTED;
events.onConnectedToRoom(parameters);
} else {
reportError("Unexpected TCP message: " + msg);
}
} catch (JSONException e) {
reportError("TCP message JSON parsing error: " + e.toString());
}
}
@Override
public void onTCPError(String description) {
reportError("TCP connection error: " + description);
}
@Override
public void onTCPClose() {
events.onChannelClose();
}
// --------------------------------------------------------------------
// Helper functions.
private void reportError(final String errorMessage) {
Log.e(TAG, errorMessage);
executor.execute(new Runnable() {
@Override
public void run() {
if (roomState != ConnectionState.ERROR) {
roomState = ConnectionState.ERROR;
events.onChannelError(errorMessage);
}
}
});
}
private void sendMessage(final String message) {
executor.execute(new Runnable() {
@Override
public void run() {
tcpClient.send(message);
}
});
}
// Put a |key|->|value| mapping in |json|.
private static void jsonPut(JSONObject json, String key, Object value) {
try {
json.put(key, value);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Converts a Java candidate to a JSONObject.
private static JSONObject toJsonCandidate(final IceCandidate candidate) {
JSONObject json = new JSONObject();
jsonPut(json, "label", candidate.sdpMLineIndex);
jsonPut(json, "id", candidate.sdpMid);
jsonPut(json, "candidate", candidate.sdp);
return json;
}
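// e.g. toJsonCandidate() yields (illustrative values):
//   {"label":0,"id":"audio","candidate":"candidate:1 1 UDP 2122252543 ..."}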
// Converts a JSON candidate to a Java object.
private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
return new IceCandidate(
json.getString("id"), json.getInt("label"), json.getString("candidate"));
}
}


@ -0,0 +1,204 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.app.Fragment;
import android.os.Bundle;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.TextView;
import java.util.HashMap;
import java.util.Map;
import org.webrtc.StatsReport;
/**
* Fragment for HUD statistics display.
*/
public class HudFragment extends Fragment {
private TextView encoderStatView;
private TextView hudViewBwe;
private TextView hudViewConnection;
private TextView hudViewVideoSend;
private TextView hudViewVideoRecv;
private ImageButton toggleDebugButton;
private boolean videoCallEnabled;
private boolean displayHud;
private volatile boolean isRunning;
private CpuMonitor cpuMonitor;
@Override
public View onCreateView(
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View controlView = inflater.inflate(R.layout.fragment_hud, container, false);
// Create UI controls.
encoderStatView = controlView.findViewById(R.id.encoder_stat_call);
hudViewBwe = controlView.findViewById(R.id.hud_stat_bwe);
hudViewConnection = controlView.findViewById(R.id.hud_stat_connection);
hudViewVideoSend = controlView.findViewById(R.id.hud_stat_video_send);
hudViewVideoRecv = controlView.findViewById(R.id.hud_stat_video_recv);
toggleDebugButton = controlView.findViewById(R.id.button_toggle_debug);
toggleDebugButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (displayHud) {
int visibility =
(hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
hudViewsSetProperties(visibility);
}
}
});
return controlView;
}
@Override
public void onStart() {
super.onStart();
Bundle args = getArguments();
if (args != null) {
videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
displayHud = args.getBoolean(CallActivity.EXTRA_DISPLAY_HUD, false);
}
int visibility = displayHud ? View.VISIBLE : View.INVISIBLE;
encoderStatView.setVisibility(visibility);
toggleDebugButton.setVisibility(visibility);
hudViewsSetProperties(View.INVISIBLE);
isRunning = true;
}
@Override
public void onStop() {
isRunning = false;
super.onStop();
}
public void setCpuMonitor(CpuMonitor cpuMonitor) {
this.cpuMonitor = cpuMonitor;
}
private void hudViewsSetProperties(int visibility) {
hudViewBwe.setVisibility(visibility);
hudViewConnection.setVisibility(visibility);
hudViewVideoSend.setVisibility(visibility);
hudViewVideoRecv.setVisibility(visibility);
hudViewBwe.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
hudViewConnection.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
hudViewVideoSend.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
hudViewVideoRecv.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
}
private Map<String, String> getReportMap(StatsReport report) {
Map<String, String> reportMap = new HashMap<>();
for (StatsReport.Value value : report.values) {
reportMap.put(value.name, value.value);
}
return reportMap;
}
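// Note: the report types matched below ("ssrc", "googCandidatePair",
// "bweforvideo") come from the legacy goog-prefixed getStats() API; each
// report carries flat name/value pairs such as googFrameRateSent -> "30"
// (illustrative value).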
public void updateEncoderStatistics(final StatsReport[] reports) {
if (!isRunning || !displayHud) {
return;
}
StringBuilder encoderStat = new StringBuilder(128);
StringBuilder bweStat = new StringBuilder();
StringBuilder connectionStat = new StringBuilder();
StringBuilder videoSendStat = new StringBuilder();
StringBuilder videoRecvStat = new StringBuilder();
String fps = null;
String targetBitrate = null;
String actualBitrate = null;
for (StatsReport report : reports) {
if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
// Send video statistics.
Map<String, String> reportMap = getReportMap(report);
String trackId = reportMap.get("googTrackId");
if (trackId != null && trackId.contains(PeerConnectionClient.VIDEO_TRACK_ID)) {
fps = reportMap.get("googFrameRateSent");
videoSendStat.append(report.id).append("\n");
for (StatsReport.Value value : report.values) {
String name = value.name.replace("goog", "");
videoSendStat.append(name).append("=").append(value.value).append("\n");
}
}
} else if (report.type.equals("ssrc") && report.id.contains("ssrc")
&& report.id.contains("recv")) {
// Receive video statistics.
Map<String, String> reportMap = getReportMap(report);
// Check if this stat is for video track.
String frameWidth = reportMap.get("googFrameWidthReceived");
if (frameWidth != null) {
videoRecvStat.append(report.id).append("\n");
for (StatsReport.Value value : report.values) {
String name = value.name.replace("goog", "");
videoRecvStat.append(name).append("=").append(value.value).append("\n");
}
}
} else if (report.id.equals("bweforvideo")) {
// BWE statistics.
Map<String, String> reportMap = getReportMap(report);
targetBitrate = reportMap.get("googTargetEncBitrate");
actualBitrate = reportMap.get("googActualEncBitrate");
bweStat.append(report.id).append("\n");
for (StatsReport.Value value : report.values) {
String name = value.name.replace("goog", "").replace("Available", "");
bweStat.append(name).append("=").append(value.value).append("\n");
}
} else if (report.type.equals("googCandidatePair")) {
// Connection statistics.
Map<String, String> reportMap = getReportMap(report);
String activeConnection = reportMap.get("googActiveConnection");
if (activeConnection != null && activeConnection.equals("true")) {
connectionStat.append(report.id).append("\n");
for (StatsReport.Value value : report.values) {
String name = value.name.replace("goog", "");
connectionStat.append(name).append("=").append(value.value).append("\n");
}
}
}
}
hudViewBwe.setText(bweStat.toString());
hudViewConnection.setText(connectionStat.toString());
hudViewVideoSend.setText(videoSendStat.toString());
hudViewVideoRecv.setText(videoRecvStat.toString());
if (videoCallEnabled) {
if (fps != null) {
encoderStat.append("Fps: ").append(fps).append("\n");
}
if (targetBitrate != null) {
encoderStat.append("Target BR: ").append(targetBitrate).append("\n");
}
if (actualBitrate != null) {
encoderStat.append("Actual BR: ").append(actualBitrate).append("\n");
}
}
if (cpuMonitor != null) {
encoderStat.append("CPU%: ")
.append(cpuMonitor.getCpuUsageCurrent())
.append("/")
.append(cpuMonitor.getCpuUsageAverage())
.append(". Freq: ")
.append(cpuMonitor.getFrequencyScaleAverage());
}
encoderStatView.setText(encoderStat.toString());
}
}

The diff for this file is not shown because of its large size.


@ -0,0 +1,143 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.media.AudioFormat;
import android.os.Environment;
import android.support.annotation.Nullable;
import android.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.concurrent.ExecutorService;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
/**
* Implements the JavaAudioDeviceModule.SamplesReadyCallback interface and
* writes recorded raw audio samples to an output file.
*/
public class RecordedAudioToFileController implements SamplesReadyCallback {
private static final String TAG = "RecordedAudioToFile";
private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L;
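// For reference: 16-bit mono PCM at 48 kHz is 96000 bytes/s, so
// 58348800 bytes / 96000 bytes/s ~= 608 seconds, i.e. roughly 10 minutes.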
private final Object lock = new Object();
private final ExecutorService executor;
@Nullable private OutputStream rawAudioFileOutputStream;
private boolean isRunning;
private long fileSizeInBytes;
public RecordedAudioToFileController(ExecutorService executor) {
Log.d(TAG, "ctor");
this.executor = executor;
}
/**
* Should be called on the same executor thread as the one provided at
* construction.
*/
public boolean start() {
Log.d(TAG, "start");
if (!isExternalStorageWritable()) {
Log.e(TAG, "Writing to external media is not possible");
return false;
}
synchronized (lock) {
isRunning = true;
}
return true;
}
/**
* Should be called on the same executor thread as the one provided at
* construction.
*/
public void stop() {
Log.d(TAG, "stop");
synchronized (lock) {
isRunning = false;
if (rawAudioFileOutputStream != null) {
try {
rawAudioFileOutputStream.close();
} catch (IOException e) {
Log.e(TAG, "Failed to close file with saved input audio: " + e);
}
rawAudioFileOutputStream = null;
}
fileSizeInBytes = 0;
}
}
// Checks if external storage is available for read and write.
private boolean isExternalStorageWritable() {
String state = Environment.getExternalStorageState();
return Environment.MEDIA_MOUNTED.equals(state);
}
// Utilizes audio parameters to create a file name which contains sufficient
// information so that the file can be played using an external file player.
// Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm.
private void openRawAudioOutputFile(int sampleRate, int channelCount) {
final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator
+ "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz"
+ ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm";
final File outputFile = new File(fileName);
try {
rawAudioFileOutputStream = new FileOutputStream(outputFile);
Log.d(TAG, "Opened file for recording: " + fileName);
} catch (FileNotFoundException e) {
Log.e(TAG, "Failed to open audio output file: " + e.getMessage());
}
}
// Called when new audio samples are ready.
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
// The native audio layer on Android should use 16-bit PCM format.
if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
Log.e(TAG, "Invalid audio format");
return;
}
synchronized (lock) {
// Abort early if stop() has been called.
if (!isRunning) {
return;
}
// Open a new file on the first callback only, since that lets us include
// the audio parameters in the file name.
if (rawAudioFileOutputStream == null) {
openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount());
fileSizeInBytes = 0;
}
}
// Append the recorded 16-bit audio samples to the open output file.
executor.execute(() -> {
if (rawAudioFileOutputStream != null) {
try {
// Set a limit on max file size. 58348800 bytes corresponds to
// approximately 10 minutes of recording in mono at 48kHz.
if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) {
// Writes samples.getData().length bytes to output stream.
rawAudioFileOutputStream.write(samples.getData());
fileSizeInBytes += samples.getData().length;
}
} catch (IOException e) {
Log.e(TAG, "Failed to write audio to file: " + e.getMessage());
}
}
});
}
}


@ -0,0 +1,225 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.Scanner;
import java.util.List;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.util.AsyncHttpURLConnection;
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.IceCandidate;
import org.webrtc.PeerConnection;
import org.webrtc.SessionDescription;
/**
* Asynchronously converts an AppRTC room URL into the set of signaling
* parameters to use with that room.
*/
public class RoomParametersFetcher {
private static final String TAG = "RoomRTCClient";
private static final int TURN_HTTP_TIMEOUT_MS = 5000;
private final RoomParametersFetcherEvents events;
private final String roomUrl;
private final String roomMessage;
/**
* Room parameters fetcher callbacks.
*/
public interface RoomParametersFetcherEvents {
/**
* Callback fired once the room's signaling parameters
* SignalingParameters are extracted.
*/
void onSignalingParametersReady(final SignalingParameters params);
/**
* Callback for room parameters extraction error.
*/
void onSignalingParametersError(final String description);
}
public RoomParametersFetcher(
String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
this.roomUrl = roomUrl;
this.roomMessage = roomMessage;
this.events = events;
}
public void makeRequest() {
Log.d(TAG, "Connecting to room: " + roomUrl);
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
Log.e(TAG, "Room connection error: " + errorMessage);
events.onSignalingParametersError(errorMessage);
}
@Override
public void onHttpComplete(String response) {
roomHttpResponseParse(response);
}
});
httpConnection.send();
}
private void roomHttpResponseParse(String response) {
Log.d(TAG, "Room response: " + response);
try {
List<IceCandidate> iceCandidates = null;
SessionDescription offerSdp = null;
JSONObject roomJson = new JSONObject(response);
String result = roomJson.getString("result");
if (!result.equals("SUCCESS")) {
events.onSignalingParametersError("Room response error: " + result);
return;
}
response = roomJson.getString("params");
roomJson = new JSONObject(response);
String roomId = roomJson.getString("room_id");
String clientId = roomJson.getString("client_id");
String wssUrl = roomJson.getString("wss_url");
String wssPostUrl = roomJson.getString("wss_post_url");
boolean initiator = roomJson.getBoolean("is_initiator");
if (!initiator) {
iceCandidates = new ArrayList<>();
String messagesString = roomJson.getString("messages");
JSONArray messages = new JSONArray(messagesString);
for (int i = 0; i < messages.length(); ++i) {
String messageString = messages.getString(i);
JSONObject message = new JSONObject(messageString);
String messageType = message.getString("type");
Log.d(TAG, "GAE->C #" + i + " : " + messageString);
if (messageType.equals("offer")) {
offerSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
} else if (messageType.equals("candidate")) {
IceCandidate candidate = new IceCandidate(
message.getString("id"), message.getInt("label"), message.getString("candidate"));
iceCandidates.add(candidate);
} else {
Log.e(TAG, "Unknown message: " + messageString);
}
}
}
Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
Log.d(TAG, "Initiator: " + initiator);
Log.d(TAG, "WSS url: " + wssUrl);
Log.d(TAG, "WSS POST url: " + wssPostUrl);
List<PeerConnection.IceServer> iceServers =
iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
boolean isTurnPresent = false;
for (PeerConnection.IceServer server : iceServers) {
Log.d(TAG, "IceServer: " + server);
for (String uri : server.urls) {
if (uri.startsWith("turn:")) {
isTurnPresent = true;
break;
}
}
}
// Request TURN servers.
if (!isTurnPresent && !roomJson.optString("ice_server_url").isEmpty()) {
List<PeerConnection.IceServer> turnServers =
requestTurnServers(roomJson.getString("ice_server_url"));
for (PeerConnection.IceServer turnServer : turnServers) {
Log.d(TAG, "TurnServer: " + turnServer);
iceServers.add(turnServer);
}
}
SignalingParameters params = new SignalingParameters(
iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
events.onSignalingParametersReady(params);
} catch (JSONException e) {
events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
} catch (IOException e) {
events.onSignalingParametersError("Room IO error: " + e.toString());
}
}
// Requests & returns a TURN ICE Server based on a request URL. Must be run
// off the main thread!
private List<PeerConnection.IceServer> requestTurnServers(String url)
throws IOException, JSONException {
List<PeerConnection.IceServer> turnServers = new ArrayList<>();
Log.d(TAG, "Request TURN from: " + url);
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
connection.setDoOutput(true);
connection.setRequestProperty("REFERER", "https://appr.tc");
connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
int responseCode = connection.getResponseCode();
if (responseCode != 200) {
throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+ connection.getHeaderField(null));
}
InputStream responseStream = connection.getInputStream();
String response = drainStream(responseStream);
connection.disconnect();
Log.d(TAG, "TURN response: " + response);
JSONObject responseJSON = new JSONObject(response);
JSONArray iceServers = responseJSON.getJSONArray("iceServers");
for (int i = 0; i < iceServers.length(); ++i) {
JSONObject server = iceServers.getJSONObject(i);
JSONArray turnUrls = server.getJSONArray("urls");
String username = server.has("username") ? server.getString("username") : "";
String credential = server.has("credential") ? server.getString("credential") : "";
for (int j = 0; j < turnUrls.length(); j++) {
String turnUrl = turnUrls.getString(j);
PeerConnection.IceServer turnServer =
PeerConnection.IceServer.builder(turnUrl)
.setUsername(username)
.setPassword(credential)
.createIceServer();
turnServers.add(turnServer);
}
}
return turnServers;
}
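// For reference, the endpoint queried above is expected to answer with JSON of
// roughly this shape (illustrative values only):
//
//   {"iceServers": [{"urls": ["turn:turn.example.com:3478"],
//                    "username": "user", "credential": "secret"}]}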
// Returns the list of ICE servers described by a WebRTC PeerConnection
// configuration string.
private List<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
throws JSONException {
JSONObject json = new JSONObject(pcConfig);
JSONArray servers = json.getJSONArray("iceServers");
List<PeerConnection.IceServer> ret = new ArrayList<>();
for (int i = 0; i < servers.length(); ++i) {
JSONObject server = servers.getJSONObject(i);
String url = server.getString("urls");
String credential = server.has("credential") ? server.getString("credential") : "";
PeerConnection.IceServer turnServer =
PeerConnection.IceServer.builder(url)
.setPassword(credential)
.createIceServer();
ret.add(turnServer);
}
return ret;
}
// Return the contents of an InputStream as a String.
private static String drainStream(InputStream in) {
Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}
}
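// Usage sketch (not part of the original file; the room URL is a placeholder):
//
//   RoomParametersFetcherEvents events = new RoomParametersFetcherEvents() {
//     @Override
//     public void onSignalingParametersReady(SignalingParameters params) {
//       // Connect and register the WebSocket client here.
//     }
//     @Override
//     public void onSignalingParametersError(String description) {
//       // Surface the error to the user.
//     }
//   };
//   new RoomParametersFetcher("https://appr.tc/join/myroom", null, events).makeRequest();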

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java

@ -0,0 +1,73 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.os.ParcelFileDescriptor;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import org.webrtc.PeerConnection;
public class RtcEventLog {
private static final String TAG = "RtcEventLog";
private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000;
private final PeerConnection peerConnection;
private RtcEventLogState state = RtcEventLogState.INACTIVE;
enum RtcEventLogState {
INACTIVE,
STARTED,
STOPPED,
}
public RtcEventLog(PeerConnection peerConnection) {
if (peerConnection == null) {
throw new NullPointerException("The peer connection is null.");
}
this.peerConnection = peerConnection;
}
public void start(final File outputFile) {
if (state == RtcEventLogState.STARTED) {
Log.e(TAG, "RtcEventLog has already started.");
return;
}
final ParcelFileDescriptor fileDescriptor;
try {
fileDescriptor = ParcelFileDescriptor.open(outputFile,
ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
| ParcelFileDescriptor.MODE_TRUNCATE);
} catch (IOException e) {
Log.e(TAG, "Failed to create a new file", e);
return;
}
// Passes ownership of the file to WebRTC.
boolean success =
peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES);
if (!success) {
Log.e(TAG, "Failed to start RTC event log.");
return;
}
state = RtcEventLogState.STARTED;
Log.d(TAG, "RtcEventLog started.");
}
public void stop() {
if (state != RtcEventLogState.STARTED) {
Log.e(TAG, "RtcEventLog was not started.");
return;
}
peerConnection.stopRtcEventLog();
state = RtcEventLogState.STOPPED;
Log.d(TAG, "RtcEventLog stopped.");
}
}
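// Usage sketch (not part of the original file; the file name is an example):
//
//   RtcEventLog rtcEventLog = new RtcEventLog(peerConnection);
//   rtcEventLog.start(new File(appContext.getFilesDir(), "event_log.log"));
//   // ... during the call ...
//   rtcEventLog.stop();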

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java

@ -0,0 +1,317 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import org.webrtc.Camera2Enumerator;
import org.webrtc.audio.JavaAudioDeviceModule;
/**
* Settings activity for AppRTC.
*/
public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
private SettingsFragment settingsFragment;
private String keyprefVideoCall;
private String keyprefScreencapture;
private String keyprefCamera2;
private String keyprefResolution;
private String keyprefFps;
private String keyprefCaptureQualitySlider;
private String keyprefMaxVideoBitrateType;
private String keyprefMaxVideoBitrateValue;
private String keyPrefVideoCodec;
private String keyprefHwCodec;
private String keyprefCaptureToTexture;
private String keyprefFlexfec;
private String keyprefStartAudioBitrateType;
private String keyprefStartAudioBitrateValue;
private String keyPrefAudioCodec;
private String keyprefNoAudioProcessing;
private String keyprefAecDump;
private String keyprefEnableSaveInputAudioToFile;
private String keyprefOpenSLES;
private String keyprefDisableBuiltInAEC;
private String keyprefDisableBuiltInAGC;
private String keyprefDisableBuiltInNS;
private String keyprefDisableWebRtcAGCAndHPF;
private String keyprefSpeakerphone;
private String keyPrefRoomServerUrl;
private String keyPrefDisplayHud;
private String keyPrefTracing;
private String keyprefEnabledRtcEventLog;
private String keyprefEnableDataChannel;
private String keyprefOrdered;
private String keyprefMaxRetransmitTimeMs;
private String keyprefMaxRetransmits;
private String keyprefDataProtocol;
private String keyprefNegotiated;
private String keyprefDataId;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
keyprefVideoCall = getString(R.string.pref_videocall_key);
keyprefScreencapture = getString(R.string.pref_screencapture_key);
keyprefCamera2 = getString(R.string.pref_camera2_key);
keyprefResolution = getString(R.string.pref_resolution_key);
keyprefFps = getString(R.string.pref_fps_key);
keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
keyprefMaxVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
keyprefMaxVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
keyprefHwCodec = getString(R.string.pref_hwcodec_key);
keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
keyprefFlexfec = getString(R.string.pref_flexfec_key);
keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
keyprefAecDump = getString(R.string.pref_aecdump_key);
keyprefEnableSaveInputAudioToFile =
getString(R.string.pref_enable_save_input_audio_to_file_key);
keyprefOpenSLES = getString(R.string.pref_opensles_key);
keyprefDisableBuiltInAEC = getString(R.string.pref_disable_built_in_aec_key);
keyprefDisableBuiltInAGC = getString(R.string.pref_disable_built_in_agc_key);
keyprefDisableBuiltInNS = getString(R.string.pref_disable_built_in_ns_key);
keyprefDisableWebRtcAGCAndHPF = getString(R.string.pref_disable_webrtc_agc_and_hpf_key);
keyprefSpeakerphone = getString(R.string.pref_speakerphone_key);
keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
keyprefOrdered = getString(R.string.pref_ordered_key);
keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
keyprefNegotiated = getString(R.string.pref_negotiated_key);
keyprefDataId = getString(R.string.pref_data_id_key);
keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
keyPrefTracing = getString(R.string.pref_tracing_key);
keyprefEnabledRtcEventLog = getString(R.string.pref_enable_rtceventlog_key);
// Display the fragment as the main content.
settingsFragment = new SettingsFragment();
getFragmentManager()
.beginTransaction()
.replace(android.R.id.content, settingsFragment)
.commit();
}
@Override
protected void onResume() {
super.onResume();
// Set summary to be the user-description for the selected value
SharedPreferences sharedPreferences =
settingsFragment.getPreferenceScreen().getSharedPreferences();
sharedPreferences.registerOnSharedPreferenceChangeListener(this);
updateSummaryB(sharedPreferences, keyprefVideoCall);
updateSummaryB(sharedPreferences, keyprefScreencapture);
updateSummaryB(sharedPreferences, keyprefCamera2);
updateSummary(sharedPreferences, keyprefResolution);
updateSummary(sharedPreferences, keyprefFps);
updateSummaryB(sharedPreferences, keyprefCaptureQualitySlider);
updateSummary(sharedPreferences, keyprefMaxVideoBitrateType);
updateSummaryBitrate(sharedPreferences, keyprefMaxVideoBitrateValue);
setVideoBitrateEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefVideoCodec);
updateSummaryB(sharedPreferences, keyprefHwCodec);
updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
updateSummaryB(sharedPreferences, keyprefFlexfec);
updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
setAudioBitrateEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefAudioCodec);
updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
updateSummaryB(sharedPreferences, keyprefAecDump);
updateSummaryB(sharedPreferences, keyprefEnableSaveInputAudioToFile);
updateSummaryB(sharedPreferences, keyprefOpenSLES);
updateSummaryB(sharedPreferences, keyprefDisableBuiltInAEC);
updateSummaryB(sharedPreferences, keyprefDisableBuiltInAGC);
updateSummaryB(sharedPreferences, keyprefDisableBuiltInNS);
updateSummaryB(sharedPreferences, keyprefDisableWebRtcAGCAndHPF);
updateSummaryList(sharedPreferences, keyprefSpeakerphone);
updateSummaryB(sharedPreferences, keyprefEnableDataChannel);
updateSummaryB(sharedPreferences, keyprefOrdered);
updateSummary(sharedPreferences, keyprefMaxRetransmitTimeMs);
updateSummary(sharedPreferences, keyprefMaxRetransmits);
updateSummary(sharedPreferences, keyprefDataProtocol);
updateSummaryB(sharedPreferences, keyprefNegotiated);
updateSummary(sharedPreferences, keyprefDataId);
setDataChannelEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefRoomServerUrl);
updateSummaryB(sharedPreferences, keyPrefDisplayHud);
updateSummaryB(sharedPreferences, keyPrefTracing);
updateSummaryB(sharedPreferences, keyprefEnabledRtcEventLog);
if (!Camera2Enumerator.isSupported(this)) {
Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);
camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
camera2Preference.setEnabled(false);
}
if (!JavaAudioDeviceModule.isBuiltInAcousticEchoCancelerSupported()) {
Preference disableBuiltInAECPreference =
settingsFragment.findPreference(keyprefDisableBuiltInAEC);
disableBuiltInAECPreference.setSummary(getString(R.string.pref_built_in_aec_not_available));
disableBuiltInAECPreference.setEnabled(false);
}
// Unlike AEC and NS there is no isBuiltInAutomaticGainControlSupported() query:
// built-in AGC is not supported by JavaAudioDeviceModule, so this preference is
// always disabled.
Preference disableBuiltInAGCPreference =
settingsFragment.findPreference(keyprefDisableBuiltInAGC);
disableBuiltInAGCPreference.setSummary(getString(R.string.pref_built_in_agc_not_available));
disableBuiltInAGCPreference.setEnabled(false);
if (!JavaAudioDeviceModule.isBuiltInNoiseSuppressorSupported()) {
Preference disableBuiltInNSPreference =
settingsFragment.findPreference(keyprefDisableBuiltInNS);
disableBuiltInNSPreference.setSummary(getString(R.string.pref_built_in_ns_not_available));
disableBuiltInNSPreference.setEnabled(false);
}
}
@Override
protected void onPause() {
super.onPause();
SharedPreferences sharedPreferences =
settingsFragment.getPreferenceScreen().getSharedPreferences();
sharedPreferences.unregisterOnSharedPreferenceChangeListener(this);
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
// clang-format off
if (key.equals(keyprefResolution)
|| key.equals(keyprefFps)
|| key.equals(keyprefMaxVideoBitrateType)
|| key.equals(keyPrefVideoCodec)
|| key.equals(keyprefStartAudioBitrateType)
|| key.equals(keyPrefAudioCodec)
|| key.equals(keyPrefRoomServerUrl)
|| key.equals(keyprefMaxRetransmitTimeMs)
|| key.equals(keyprefMaxRetransmits)
|| key.equals(keyprefDataProtocol)
|| key.equals(keyprefDataId)) {
updateSummary(sharedPreferences, key);
} else if (key.equals(keyprefMaxVideoBitrateValue)
|| key.equals(keyprefStartAudioBitrateValue)) {
updateSummaryBitrate(sharedPreferences, key);
} else if (key.equals(keyprefVideoCall)
|| key.equals(keyprefScreencapture)
|| key.equals(keyprefCamera2)
|| key.equals(keyPrefTracing)
|| key.equals(keyprefCaptureQualitySlider)
|| key.equals(keyprefHwCodec)
|| key.equals(keyprefCaptureToTexture)
|| key.equals(keyprefFlexfec)
|| key.equals(keyprefNoAudioProcessing)
|| key.equals(keyprefAecDump)
|| key.equals(keyprefEnableSaveInputAudioToFile)
|| key.equals(keyprefOpenSLES)
|| key.equals(keyprefDisableBuiltInAEC)
|| key.equals(keyprefDisableBuiltInAGC)
|| key.equals(keyprefDisableBuiltInNS)
|| key.equals(keyprefDisableWebRtcAGCAndHPF)
|| key.equals(keyPrefDisplayHud)
|| key.equals(keyprefEnableDataChannel)
|| key.equals(keyprefOrdered)
|| key.equals(keyprefNegotiated)
|| key.equals(keyprefEnabledRtcEventLog)) {
updateSummaryB(sharedPreferences, key);
} else if (key.equals(keyprefSpeakerphone)) {
updateSummaryList(sharedPreferences, key);
}
// clang-format on
if (key.equals(keyprefMaxVideoBitrateType)) {
setVideoBitrateEnable(sharedPreferences);
}
if (key.equals(keyprefStartAudioBitrateType)) {
setAudioBitrateEnable(sharedPreferences);
}
if (key.equals(keyprefEnableDataChannel)) {
setDataChannelEnable(sharedPreferences);
}
}
private void updateSummary(SharedPreferences sharedPreferences, String key) {
Preference updatedPref = settingsFragment.findPreference(key);
// Set summary to be the user-description for the selected value
updatedPref.setSummary(sharedPreferences.getString(key, ""));
}
private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
Preference updatedPref = settingsFragment.findPreference(key);
updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
}
private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
Preference updatedPref = settingsFragment.findPreference(key);
updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
? getString(R.string.pref_value_enabled)
: getString(R.string.pref_value_disabled));
}
private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
ListPreference updatedPref = (ListPreference) settingsFragment.findPreference(key);
updatedPref.setSummary(updatedPref.getEntry());
}
private void setVideoBitrateEnable(SharedPreferences sharedPreferences) {
Preference bitratePreferenceValue =
settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
String bitrateType =
sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
bitratePreferenceValue.setEnabled(!bitrateType.equals(bitrateTypeDefault));
}
private void setAudioBitrateEnable(SharedPreferences sharedPreferences) {
Preference bitratePreferenceValue =
settingsFragment.findPreference(keyprefStartAudioBitrateValue);
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
String bitrateType =
sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
bitratePreferenceValue.setEnabled(!bitrateType.equals(bitrateTypeDefault));
}
private void setDataChannelEnable(SharedPreferences sharedPreferences) {
boolean enabled = sharedPreferences.getBoolean(keyprefEnableDataChannel, true);
settingsFragment.findPreference(keyprefOrdered).setEnabled(enabled);
settingsFragment.findPreference(keyprefMaxRetransmitTimeMs).setEnabled(enabled);
settingsFragment.findPreference(keyprefMaxRetransmits).setEnabled(enabled);
settingsFragment.findPreference(keyprefDataProtocol).setEnabled(enabled);
settingsFragment.findPreference(keyprefNegotiated).setEnabled(enabled);
settingsFragment.findPreference(keyprefDataId).setEnabled(enabled);
}
}
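// The activity is typically launched from an options menu, e.g.:
//
//   startActivity(new Intent(this, SettingsActivity.class));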

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java

@ -0,0 +1,26 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.os.Bundle;
import android.preference.PreferenceFragment;
/**
* Settings fragment for AppRTC.
*/
public class SettingsFragment extends PreferenceFragment {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Load the preferences from an XML resource
addPreferencesFromResource(R.xml.preferences);
}
}

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java

@ -0,0 +1,362 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.support.annotation.Nullable;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.util.concurrent.ExecutorService;
import org.webrtc.ThreadUtils;
/**
* Replacement for WebSocketChannelClient for direct communication between two IP addresses. Handles
* the signaling between the two clients using a TCP connection.
* <p>
* All public methods should be called from the looper executor thread
* passed in the constructor; otherwise an exception will be thrown.
* All events are dispatched on the same thread.
*/
public class TCPChannelClient {
private static final String TAG = "TCPChannelClient";
private final ExecutorService executor;
private final ThreadUtils.ThreadChecker executorThreadCheck;
private final TCPChannelEvents eventListener;
private TCPSocket socket;
/**
* Callback interface for messages delivered on TCP Connection. All callbacks are invoked from the
* looper executor thread.
*/
public interface TCPChannelEvents {
void onTCPConnected(boolean server);
void onTCPMessage(String message);
void onTCPError(String description);
void onTCPClose();
}
/**
* Initializes the TCPChannelClient. If |ip| is a local address, starts a
* listening server on that IP; otherwise connects to that IP.
*
* @param eventListener Listener that will receive events from the client.
* @param ip IP address to listen on or connect to.
* @param port Port to listen on or connect to.
*/
public TCPChannelClient(
ExecutorService executor, TCPChannelEvents eventListener, String ip, int port) {
this.executor = executor;
executorThreadCheck = new ThreadUtils.ThreadChecker();
executorThreadCheck.detachThread();
this.eventListener = eventListener;
InetAddress address;
try {
address = InetAddress.getByName(ip);
} catch (UnknownHostException e) {
reportError("Invalid IP address.");
return;
}
if (address.isAnyLocalAddress()) {
socket = new TCPSocketServer(address, port);
} else {
socket = new TCPSocketClient(address, port);
}
socket.start();
}
/**
* Disconnects the client if not already disconnected. This will fire the onTCPClose event.
*/
public void disconnect() {
executorThreadCheck.checkIsOnValidThread();
socket.disconnect();
}
/**
* Sends a message on the socket.
*
* @param message Message to be sent.
*/
public void send(String message) {
executorThreadCheck.checkIsOnValidThread();
socket.send(message);
}
/**
* Helper method for firing onTCPError events. Calls onTCPError on the executor thread.
*/
private void reportError(final String message) {
Log.e(TAG, "TCP Error: " + message);
executor.execute(new Runnable() {
@Override
public void run() {
eventListener.onTCPError(message);
}
});
}
/**
* Base class for server and client sockets. Contains a listening thread that will call
* eventListener.onTCPMessage on new messages.
*/
private abstract class TCPSocket extends Thread {
// Lock guarding |out| and |rawSocket|.
protected final Object rawSocketLock;
@Nullable
private PrintWriter out;
@Nullable
private Socket rawSocket;
/**
* Connect to the peer, potentially a slow operation.
*
* @return Socket connection, null if connection failed.
*/
@Nullable
public abstract Socket connect();
/** Returns true if this socket is a server socket. */
public abstract boolean isServer();
TCPSocket() {
rawSocketLock = new Object();
}
/**
* The listening thread.
*/
@Override
public void run() {
Log.d(TAG, "Listening thread started...");
// Connect into a temporary variable first so the potentially slow connect()
// does not run while holding |rawSocketLock|.
Socket tempSocket = connect();
// Connecting failed; the error has already been reported, just exit.
if (tempSocket == null) {
return;
}
Log.d(TAG, "TCP connection established.");
BufferedReader in;
synchronized (rawSocketLock) {
if (rawSocket != null) {
Log.e(TAG, "Socket already existed and will be replaced.");
}
rawSocket = tempSocket;
try {
out = new PrintWriter(
new OutputStreamWriter(rawSocket.getOutputStream(), Charset.forName("UTF-8")), true);
in = new BufferedReader(
new InputStreamReader(rawSocket.getInputStream(), Charset.forName("UTF-8")));
} catch (IOException e) {
reportError("Failed to open IO on rawSocket: " + e.getMessage());
return;
}
}
Log.v(TAG, "Execute onTCPConnected");
executor.execute(new Runnable() {
@Override
public void run() {
Log.v(TAG, "Run onTCPConnected");
eventListener.onTCPConnected(isServer());
}
});
while (true) {
final String message;
try {
message = in.readLine();
} catch (IOException e) {
synchronized (rawSocketLock) {
// If socket was closed, this is expected.
if (rawSocket == null) {
break;
}
}
reportError("Failed to read from rawSocket: " + e.getMessage());
break;
}
// No data received, rawSocket probably closed.
if (message == null) {
break;
}
executor.execute(new Runnable() {
@Override
public void run() {
Log.v(TAG, "Receive: " + message);
eventListener.onTCPMessage(message);
}
});
}
Log.d(TAG, "Receiving thread exiting...");
// Close the rawSocket if it is still open.
disconnect();
}
/** Closes the rawSocket if it is still open. Also fires the onTCPClose event. */
public void disconnect() {
try {
synchronized (rawSocketLock) {
if (rawSocket != null) {
rawSocket.close();
rawSocket = null;
out = null;
executor.execute(new Runnable() {
@Override
public void run() {
eventListener.onTCPClose();
}
});
}
}
} catch (IOException e) {
reportError("Failed to close rawSocket: " + e.getMessage());
}
}
/**
* Sends a message on the socket. Should only be called on the executor thread.
*/
public void send(String message) {
Log.v(TAG, "Send: " + message);
synchronized (rawSocketLock) {
if (out == null) {
reportError("Sending data on closed socket.");
return;
}
out.write(message + "\n");
out.flush();
}
}
}
private class TCPSocketServer extends TCPSocket {
// Server socket is also guarded by rawSocketLock.
@Nullable
private ServerSocket serverSocket;
private final InetAddress address;
private final int port;
public TCPSocketServer(InetAddress address, int port) {
this.address = address;
this.port = port;
}
/** Opens a listening socket and waits for a connection. */
@Nullable
@Override
public Socket connect() {
Log.d(TAG, "Listening on [" + address.getHostAddress() + "]:" + Integer.toString(port));
final ServerSocket tempSocket;
try {
tempSocket = new ServerSocket(port, 0, address);
} catch (IOException e) {
reportError("Failed to create server socket: " + e.getMessage());
return null;
}
synchronized (rawSocketLock) {
if (serverSocket != null) {
Log.e(TAG, "Server rawSocket was already listening and new will be opened.");
}
serverSocket = tempSocket;
}
try {
return tempSocket.accept();
} catch (IOException e) {
reportError("Failed to receive connection: " + e.getMessage());
return null;
}
}
/** Closes the listening socket and calls super. */
@Override
public void disconnect() {
try {
synchronized (rawSocketLock) {
if (serverSocket != null) {
serverSocket.close();
serverSocket = null;
}
}
} catch (IOException e) {
reportError("Failed to close server socket: " + e.getMessage());
}
super.disconnect();
}
@Override
public boolean isServer() {
return true;
}
}
private class TCPSocketClient extends TCPSocket {
private final InetAddress address;
private final int port;
public TCPSocketClient(InetAddress address, int port) {
this.address = address;
this.port = port;
}
/** Connects to the peer. */
@Nullable
@Override
public Socket connect() {
Log.d(TAG, "Connecting to [" + address.getHostAddress() + "]:" + Integer.toString(port));
try {
return new Socket(address, port);
} catch (IOException e) {
reportError("Failed to connect: " + e.getMessage());
return null;
}
}
@Override
public boolean isServer() {
return false;
}
}
}
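// Usage sketch (not part of the original file; IP and port are placeholders).
// All calls must be made on the single-threaded executor passed to the ctor:
//
//   ExecutorService executor = Executors.newSingleThreadExecutor();
//   executor.execute(() -> {
//     // An "any" address such as 0.0.0.0 makes this side listen as the server;
//     // a concrete remote address makes it connect as the client.
//     TCPChannelClient channel = new TCPChannelClient(executor, events, "0.0.0.0", 8888);
//   });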

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java

@ -0,0 +1,85 @@
/*
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.util.Log;
import android.util.TypedValue;
import android.widget.ScrollView;
import android.widget.TextView;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
* Singleton helper: install a default unhandled exception handler which shows
* an informative dialog and kills the app. Useful for apps whose
* error-handling consists of throwing RuntimeExceptions.
* NOTE: almost always more useful to
* Thread.setDefaultUncaughtExceptionHandler() rather than
* Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
*/
public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
private static final String TAG = "AppRTCMobileActivity";
private final Activity activity;
public UnhandledExceptionHandler(final Activity activity) {
this.activity = activity;
}
@Override
public void uncaughtException(Thread unusedThread, final Throwable e) {
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
String title = "Fatal error: " + getTopLevelCauseMessage(e);
String msg = getRecursiveStackTrace(e);
TextView errorView = new TextView(activity);
errorView.setText(msg);
errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
ScrollView scrollingContainer = new ScrollView(activity);
scrollingContainer.addView(errorView);
Log.e(TAG, title + "\n\n" + msg);
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
System.exit(1);
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(activity);
builder.setTitle(title)
.setView(scrollingContainer)
.setPositiveButton("Exit", listener)
.show();
}
});
}
// Returns the Message attached to the original Cause of |t|.
private static String getTopLevelCauseMessage(Throwable t) {
Throwable topLevelCause = t;
while (topLevelCause.getCause() != null) {
topLevelCause = topLevelCause.getCause();
}
return topLevelCause.getMessage();
}
// Returns a human-readable String of the stacktrace in |t|, recursively
// through all Causes that led to |t|.
private static String getRecursiveStackTrace(Throwable t) {
StringWriter writer = new StringWriter();
t.printStackTrace(new PrintWriter(writer));
return writer.toString();
}
}
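// Usage sketch (not part of the original file): install from an Activity's
// onCreate() so that exceptions on any thread surface the dialog:
//
//   Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));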

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java

@ -0,0 +1,296 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.os.Handler;
import android.support.annotation.Nullable;
import android.util.Log;
import de.tavendo.autobahn.WebSocket.WebSocketConnectionObserver;
import de.tavendo.autobahn.WebSocketConnection;
import de.tavendo.autobahn.WebSocketException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.appspot.apprtc.util.AsyncHttpURLConnection;
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
import org.json.JSONException;
import org.json.JSONObject;
/**
* WebSocket client implementation.
*
* <p>All public methods should be called from a looper executor thread
* passed in a constructor, otherwise exception will be thrown.
* All events are dispatched on the same thread.
*/
public class WebSocketChannelClient {
private static final String TAG = "WSChannelRTCClient";
private static final int CLOSE_TIMEOUT = 1000;
private final WebSocketChannelEvents events;
private final Handler handler;
private WebSocketConnection ws;
private String wsServerUrl;
private String postServerUrl;
@Nullable
private String roomID;
@Nullable
private String clientID;
private WebSocketConnectionState state;
// Do not remove this member variable: it keeps a strong reference to the
// observer, which would otherwise be garbage collected and cause test breakages.
private WebSocketObserver wsObserver;
private final Object closeEventLock = new Object();
private boolean closeEvent;
// WebSocket send queue. Messages are added to the queue when WebSocket
// client is not registered and are consumed in register() call.
private final List<String> wsSendQueue = new ArrayList<>();
/**
* Possible WebSocket connection states.
*/
public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
/**
* Callback interface for messages delivered on WebSocket.
* All events are dispatched from a looper executor thread.
*/
public interface WebSocketChannelEvents {
void onWebSocketMessage(final String message);
void onWebSocketClose();
void onWebSocketError(final String description);
}
public WebSocketChannelClient(Handler handler, WebSocketChannelEvents events) {
this.handler = handler;
this.events = events;
roomID = null;
clientID = null;
state = WebSocketConnectionState.NEW;
}
public WebSocketConnectionState getState() {
return state;
}
public void connect(final String wsUrl, final String postUrl) {
checkIfCalledOnValidThread();
if (state != WebSocketConnectionState.NEW) {
Log.e(TAG, "WebSocket is already connected.");
return;
}
wsServerUrl = wsUrl;
postServerUrl = postUrl;
closeEvent = false;
Log.d(TAG, "Connecting WebSocket to: " + wsUrl + ". Post URL: " + postUrl);
ws = new WebSocketConnection();
wsObserver = new WebSocketObserver();
try {
ws.connect(new URI(wsServerUrl), wsObserver);
} catch (URISyntaxException e) {
reportError("URI error: " + e.getMessage());
} catch (WebSocketException e) {
reportError("WebSocket connection error: " + e.getMessage());
}
}
public void register(final String roomID, final String clientID) {
checkIfCalledOnValidThread();
this.roomID = roomID;
this.clientID = clientID;
if (state != WebSocketConnectionState.CONNECTED) {
Log.w(TAG, "WebSocket register() in state " + state);
return;
}
Log.d(TAG, "Registering WebSocket for room " + roomID + ". ClientID: " + clientID);
JSONObject json = new JSONObject();
try {
json.put("cmd", "register");
json.put("roomid", roomID);
json.put("clientid", clientID);
Log.d(TAG, "C->WSS: " + json.toString());
ws.sendTextMessage(json.toString());
state = WebSocketConnectionState.REGISTERED;
// Send any previously accumulated messages.
for (String sendMessage : wsSendQueue) {
send(sendMessage);
}
wsSendQueue.clear();
} catch (JSONException e) {
reportError("WebSocket register JSON error: " + e.getMessage());
}
}
public void send(String message) {
checkIfCalledOnValidThread();
switch (state) {
case NEW:
case CONNECTED:
// Store outgoing messages and send them after websocket client
// is registered.
Log.d(TAG, "WS ACC: " + message);
wsSendQueue.add(message);
return;
case ERROR:
case CLOSED:
Log.e(TAG, "WebSocket send() in error or closed state : " + message);
return;
case REGISTERED:
JSONObject json = new JSONObject();
try {
json.put("cmd", "send");
json.put("msg", message);
message = json.toString();
Log.d(TAG, "C->WSS: " + message);
ws.sendTextMessage(message);
} catch (JSONException e) {
reportError("WebSocket send JSON error: " + e.getMessage());
}
break;
}
}
// Sends a message via HTTP POST to the WebSocket server's REST endpoint.
// Can be used to deliver messages before the WebSocket connection is opened.
public void post(String message) {
checkIfCalledOnValidThread();
sendWSSMessage("POST", message);
}
public void disconnect(boolean waitForComplete) {
checkIfCalledOnValidThread();
Log.d(TAG, "Disconnect WebSocket. State: " + state);
if (state == WebSocketConnectionState.REGISTERED) {
// Send "bye" to WebSocket server.
send("{\"type\": \"bye\"}");
state = WebSocketConnectionState.CONNECTED;
// Send HTTP DELETE to the WebSocket server's REST endpoint.
sendWSSMessage("DELETE", "");
}
// Close WebSocket in CONNECTED or ERROR states only.
if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
ws.disconnect();
state = WebSocketConnectionState.CLOSED;
// Wait for websocket close event to prevent websocket library from
// sending any pending messages to deleted looper thread.
if (waitForComplete) {
synchronized (closeEventLock) {
while (!closeEvent) {
try {
closeEventLock.wait(CLOSE_TIMEOUT);
break;
} catch (InterruptedException e) {
Log.e(TAG, "Wait error: " + e.toString());
}
}
}
}
}
Log.d(TAG, "Disconnecting WebSocket done.");
}
private void reportError(final String errorMessage) {
Log.e(TAG, errorMessage);
handler.post(new Runnable() {
@Override
public void run() {
if (state != WebSocketConnectionState.ERROR) {
state = WebSocketConnectionState.ERROR;
events.onWebSocketError(errorMessage);
}
}
});
}
// Asynchronously send POST/DELETE to WebSocket server.
private void sendWSSMessage(final String method, final String message) {
String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("WS " + method + " error: " + errorMessage);
}
@Override
public void onHttpComplete(String response) {}
});
httpConnection.send();
}
// Helper method for debugging purposes. Ensures that WebSocket method is
// called on a looper thread.
private void checkIfCalledOnValidThread() {
if (Thread.currentThread() != handler.getLooper().getThread()) {
throw new IllegalStateException("WebSocket method is not called on valid thread");
}
}
private class WebSocketObserver implements WebSocketConnectionObserver {
@Override
public void onOpen() {
Log.d(TAG, "WebSocket connection opened to: " + wsServerUrl);
handler.post(new Runnable() {
@Override
public void run() {
state = WebSocketConnectionState.CONNECTED;
// Check if we have pending register request.
if (roomID != null && clientID != null) {
register(roomID, clientID);
}
}
});
}
@Override
public void onClose(WebSocketCloseNotification code, String reason) {
Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+ state);
synchronized (closeEventLock) {
closeEvent = true;
closeEventLock.notify();
}
handler.post(new Runnable() {
@Override
public void run() {
if (state != WebSocketConnectionState.CLOSED) {
state = WebSocketConnectionState.CLOSED;
events.onWebSocketClose();
}
}
});
}
@Override
public void onTextMessage(String payload) {
Log.d(TAG, "WSS->C: " + payload);
final String message = payload;
handler.post(new Runnable() {
@Override
public void run() {
if (state == WebSocketConnectionState.CONNECTED
|| state == WebSocketConnectionState.REGISTERED) {
events.onWebSocketMessage(message);
}
}
});
}
@Override
public void onRawTextMessage(byte[] payload) {}
@Override
public void onBinaryMessage(byte[] payload) {}
}
}
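// Usage sketch (not part of the original file; URLs are placeholders). All
// calls must be made from the thread of the Handler given to the constructor:
//
//   WebSocketChannelClient wsClient = new WebSocketChannelClient(handler, events);
//   wsClient.connect("wss://example.org/ws", "https://example.org");
//   wsClient.register(roomId, clientId);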

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java

@ -0,0 +1,427 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.Nullable;
import android.util.Log;
import org.appspot.apprtc.RoomParametersFetcher.RoomParametersFetcherEvents;
import org.appspot.apprtc.WebSocketChannelClient.WebSocketChannelEvents;
import org.appspot.apprtc.WebSocketChannelClient.WebSocketConnectionState;
import org.appspot.apprtc.util.AsyncHttpURLConnection;
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.IceCandidate;
import org.webrtc.SessionDescription;
/**
* Negotiates signaling for chatting with https://appr.tc "rooms".
* Uses the client<->server specifics of the apprtc AppEngine webapp.
*
* <p>To use: create an instance of this object (registering a message handler) and
* call connectToRoom(). Once the room connection is established, the
* onConnectedToRoom() callback with room parameters is invoked.
* Messages to the other party (with local ICE candidates and answer SDP) can
* be sent after the WebSocket connection is established.
*/
public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
private static final String TAG = "WSRTCClient";
private static final String ROOM_JOIN = "join";
private static final String ROOM_MESSAGE = "message";
private static final String ROOM_LEAVE = "leave";
private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
private enum MessageType { MESSAGE, LEAVE }
private final Handler handler;
private boolean initiator;
private SignalingEvents events;
private WebSocketChannelClient wsClient;
private ConnectionState roomState;
private RoomConnectionParameters connectionParameters;
private String messageUrl;
private String leaveUrl;
public WebSocketRTCClient(SignalingEvents events) {
this.events = events;
roomState = ConnectionState.NEW;
final HandlerThread handlerThread = new HandlerThread(TAG);
handlerThread.start();
handler = new Handler(handlerThread.getLooper());
}
// --------------------------------------------------------------------
// AppRTCClient interface implementation.
// Asynchronously connects to an AppRTC room URL using the supplied connection
// parameters, retrieves the room parameters and connects to the WebSocket server.
@Override
public void connectToRoom(RoomConnectionParameters connectionParameters) {
this.connectionParameters = connectionParameters;
handler.post(new Runnable() {
@Override
public void run() {
connectToRoomInternal();
}
});
}
@Override
public void disconnectFromRoom() {
handler.post(new Runnable() {
@Override
public void run() {
disconnectFromRoomInternal();
handler.getLooper().quit();
}
});
}
// Connects to room - function runs on a local looper thread.
private void connectToRoomInternal() {
String connectionUrl = getConnectionUrl(connectionParameters);
Log.d(TAG, "Connect to room: " + connectionUrl);
roomState = ConnectionState.NEW;
wsClient = new WebSocketChannelClient(handler, this);
RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
@Override
public void onSignalingParametersReady(final SignalingParameters params) {
WebSocketRTCClient.this.handler.post(new Runnable() {
@Override
public void run() {
WebSocketRTCClient.this.signalingParametersReady(params);
}
});
}
@Override
public void onSignalingParametersError(String description) {
WebSocketRTCClient.this.reportError(description);
}
};
new RoomParametersFetcher(connectionUrl, null, callbacks).makeRequest();
}
// Disconnect from room and send bye messages - runs on a local looper thread.
private void disconnectFromRoomInternal() {
Log.d(TAG, "Disconnect. Room state: " + roomState);
if (roomState == ConnectionState.CONNECTED) {
Log.d(TAG, "Closing room.");
sendPostMessage(MessageType.LEAVE, leaveUrl, null);
}
roomState = ConnectionState.CLOSED;
if (wsClient != null) {
wsClient.disconnect(true);
}
}
// Helper functions to get connection, post message and leave message URLs
private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId
+ getQueryString(connectionParameters);
}
private String getMessageUrl(
RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+ "/" + signalingParameters.clientId + getQueryString(connectionParameters);
}
private String getLeaveUrl(
RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+ signalingParameters.clientId + getQueryString(connectionParameters);
}
private String getQueryString(RoomConnectionParameters connectionParameters) {
if (connectionParameters.urlParameters != null) {
return "?" + connectionParameters.urlParameters;
} else {
return "";
}
}
// Callback issued when room parameters are extracted. Runs on local
// looper thread.
private void signalingParametersReady(final SignalingParameters signalingParameters) {
Log.d(TAG, "Room connection completed.");
if (connectionParameters.loopback
&& (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
reportError("Loopback room is busy.");
return;
}
if (!connectionParameters.loopback && !signalingParameters.initiator
&& signalingParameters.offerSdp == null) {
Log.w(TAG, "No offer SDP in room response.");
}
initiator = signalingParameters.initiator;
messageUrl = getMessageUrl(connectionParameters, signalingParameters);
leaveUrl = getLeaveUrl(connectionParameters, signalingParameters);
Log.d(TAG, "Message URL: " + messageUrl);
Log.d(TAG, "Leave URL: " + leaveUrl);
roomState = ConnectionState.CONNECTED;
// Fire connection and signaling parameters events.
events.onConnectedToRoom(signalingParameters);
// Connect and register WebSocket client.
wsClient.connect(signalingParameters.wssUrl, signalingParameters.wssPostUrl);
wsClient.register(connectionParameters.roomId, signalingParameters.clientId);
}
// Send local offer SDP to the other participant.
@Override
public void sendOfferSdp(final SessionDescription sdp) {
handler.post(new Runnable() {
@Override
public void run() {
if (roomState != ConnectionState.CONNECTED) {
reportError("Sending offer SDP in non connected state.");
return;
}
JSONObject json = new JSONObject();
jsonPut(json, "sdp", sdp.description);
jsonPut(json, "type", "offer");
sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
if (connectionParameters.loopback) {
// In loopback mode rename this offer to answer and route it back.
SessionDescription sdpAnswer = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
events.onRemoteDescription(sdpAnswer);
}
}
});
}
// Send local answer SDP to the other participant.
@Override
public void sendAnswerSdp(final SessionDescription sdp) {
handler.post(new Runnable() {
@Override
public void run() {
if (connectionParameters.loopback) {
Log.e(TAG, "Sending answer in loopback mode.");
return;
}
JSONObject json = new JSONObject();
jsonPut(json, "sdp", sdp.description);
jsonPut(json, "type", "answer");
wsClient.send(json.toString());
}
});
}
// Send Ice candidate to the other participant.
@Override
public void sendLocalIceCandidate(final IceCandidate candidate) {
handler.post(new Runnable() {
@Override
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "candidate");
jsonPut(json, "label", candidate.sdpMLineIndex);
jsonPut(json, "id", candidate.sdpMid);
jsonPut(json, "candidate", candidate.sdp);
if (initiator) {
// Call initiator sends ice candidates to GAE server.
if (roomState != ConnectionState.CONNECTED) {
reportError("Sending ICE candidate in non connected state.");
return;
}
sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
if (connectionParameters.loopback) {
events.onRemoteIceCandidate(candidate);
}
} else {
// Call receiver sends ice candidates to websocket server.
wsClient.send(json.toString());
}
}
});
}
// Send removed Ice candidates to the other participant.
@Override
public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
handler.post(new Runnable() {
@Override
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "remove-candidates");
JSONArray jsonArray = new JSONArray();
for (final IceCandidate candidate : candidates) {
jsonArray.put(toJsonCandidate(candidate));
}
jsonPut(json, "candidates", jsonArray);
if (initiator) {
// Call initiator sends ice candidates to GAE server.
if (roomState != ConnectionState.CONNECTED) {
reportError("Sending ICE candidate removals in non connected state.");
return;
}
sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
if (connectionParameters.loopback) {
events.onRemoteIceCandidatesRemoved(candidates);
}
} else {
// Call receiver sends ice candidates to websocket server.
wsClient.send(json.toString());
}
}
});
}
// --------------------------------------------------------------------
// WebSocketChannelEvents interface implementation.
// All events are called by WebSocketChannelClient on a local looper thread
// (passed to WebSocket client constructor).
@Override
public void onWebSocketMessage(final String msg) {
if (wsClient.getState() != WebSocketConnectionState.REGISTERED) {
Log.e(TAG, "Got WebSocket message in non registered state.");
return;
}
try {
JSONObject json = new JSONObject(msg);
String msgText = json.getString("msg");
String errorText = json.optString("error");
if (msgText.length() > 0) {
json = new JSONObject(msgText);
String type = json.optString("type");
if (type.equals("candidate")) {
events.onRemoteIceCandidate(toJavaCandidate(json));
} else if (type.equals("remove-candidates")) {
JSONArray candidateArray = json.getJSONArray("candidates");
IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
for (int i = 0; i < candidateArray.length(); ++i) {
candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
}
events.onRemoteIceCandidatesRemoved(candidates);
} else if (type.equals("answer")) {
if (initiator) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else {
reportError("Received answer for call initiator: " + msg);
}
} else if (type.equals("offer")) {
if (!initiator) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else {
reportError("Received offer for call receiver: " + msg);
}
} else if (type.equals("bye")) {
events.onChannelClose();
} else {
reportError("Unexpected WebSocket message: " + msg);
}
} else {
if (errorText != null && errorText.length() > 0) {
reportError("WebSocket error message: " + errorText);
} else {
reportError("Unexpected WebSocket message: " + msg);
}
}
} catch (JSONException e) {
reportError("WebSocket message JSON parsing error: " + e.toString());
}
}
@Override
public void onWebSocketClose() {
events.onChannelClose();
}
@Override
public void onWebSocketError(String description) {
reportError("WebSocket error: " + description);
}
// --------------------------------------------------------------------
// Helper functions.
private void reportError(final String errorMessage) {
Log.e(TAG, errorMessage);
handler.post(new Runnable() {
@Override
public void run() {
if (roomState != ConnectionState.ERROR) {
roomState = ConnectionState.ERROR;
events.onChannelError(errorMessage);
}
}
});
}
// Put a |key|->|value| mapping in |json|.
private static void jsonPut(JSONObject json, String key, Object value) {
try {
json.put(key, value);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Send SDP or ICE candidate to a room server.
private void sendPostMessage(
final MessageType messageType, final String url, @Nullable final String message) {
String logInfo = url;
if (message != null) {
logInfo += ". Message: " + message;
}
Log.d(TAG, "C->GAE: " + logInfo);
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("GAE POST error: " + errorMessage);
}
@Override
public void onHttpComplete(String response) {
if (messageType == MessageType.MESSAGE) {
try {
JSONObject roomJson = new JSONObject(response);
String result = roomJson.getString("result");
if (!result.equals("SUCCESS")) {
reportError("GAE POST error: " + result);
}
} catch (JSONException e) {
reportError("GAE POST JSON error: " + e.toString());
}
}
}
});
httpConnection.send();
}
// Converts a Java candidate to a JSONObject.
private JSONObject toJsonCandidate(final IceCandidate candidate) {
JSONObject json = new JSONObject();
jsonPut(json, "label", candidate.sdpMLineIndex);
jsonPut(json, "id", candidate.sdpMid);
jsonPut(json, "candidate", candidate.sdp);
return json;
}
// Converts a JSON candidate to a Java object.
IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
return new IceCandidate(
json.getString("id"), json.getInt("label"), json.getString("candidate"));
}
}
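// Usage sketch (not part of the original file; parameters are placeholders):
//
//   AppRTCClient client = new WebSocketRTCClient(signalingEvents);
//   client.connectToRoom(
//       new RoomConnectionParameters("https://appr.tc", "myroom", false /* loopback */));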

third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java

@ -0,0 +1,47 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc.util;
import android.os.Build;
import android.util.Log;
/**
 * AppRTCUtils provides helper methods for assertions, thread info, and logging device details.
*/
public final class AppRTCUtils {
private AppRTCUtils() {}
/** Helper method which throws an exception when an assertion has failed. */
public static void assertIsTrue(boolean condition) {
if (!condition) {
throw new AssertionError("Expected condition to be true");
}
}
/** Helper method for building a string of thread information.*/
public static String getThreadInfo() {
return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ "]";
}
/** Information about the current build, taken from system properties. */
public static void logDeviceInfo(String tag) {
Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ "Release: " + Build.VERSION.RELEASE + ", "
+ "Brand: " + Build.BRAND + ", "
+ "Device: " + Build.DEVICE + ", "
+ "Id: " + Build.ID + ", "
+ "Hardware: " + Build.HARDWARE + ", "
+ "Manufacturer: " + Build.MANUFACTURER + ", "
+ "Model: " + Build.MODEL + ", "
+ "Product: " + Build.PRODUCT);
}
}

@@ -0,0 +1,114 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.Scanner;
/**
 * Asynchronous HTTP request implementation.
*/
public class AsyncHttpURLConnection {
private static final int HTTP_TIMEOUT_MS = 8000;
private static final String HTTP_ORIGIN = "https://appr.tc";
private final String method;
private final String url;
private final String message;
private final AsyncHttpEvents events;
private String contentType;
/**
   * HTTP request callbacks.
*/
public interface AsyncHttpEvents {
void onHttpError(String errorMessage);
void onHttpComplete(String response);
}
public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
this.method = method;
this.url = url;
this.message = message;
this.events = events;
}
public void setContentType(String contentType) {
this.contentType = contentType;
}
public void send() {
    new Thread(this::sendHttpMessage).start();
}
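  // Typical usage (a sketch; url, body and events are caller-supplied):
  //   AsyncHttpURLConnection conn =
  //       new AsyncHttpURLConnection("POST", url, body, events);
  //   conn.setContentType("application/json"); // Optional; defaults to text/plain.
  //   conn.send();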
private void sendHttpMessage() {
try {
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
byte[] postData = new byte[0];
if (message != null) {
postData = message.getBytes("UTF-8");
}
connection.setRequestMethod(method);
connection.setUseCaches(false);
connection.setDoInput(true);
connection.setConnectTimeout(HTTP_TIMEOUT_MS);
connection.setReadTimeout(HTTP_TIMEOUT_MS);
// TODO(glaznev) - query request origin from pref_room_server_url_key preferences.
connection.addRequestProperty("origin", HTTP_ORIGIN);
boolean doOutput = false;
if (method.equals("POST")) {
doOutput = true;
connection.setDoOutput(true);
connection.setFixedLengthStreamingMode(postData.length);
}
if (contentType == null) {
connection.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
} else {
connection.setRequestProperty("Content-Type", contentType);
}
// Send POST request.
if (doOutput && postData.length > 0) {
OutputStream outStream = connection.getOutputStream();
outStream.write(postData);
outStream.close();
}
// Get response.
int responseCode = connection.getResponseCode();
if (responseCode != 200) {
events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+ connection.getHeaderField(null));
connection.disconnect();
return;
}
InputStream responseStream = connection.getInputStream();
String response = drainStream(responseStream);
responseStream.close();
connection.disconnect();
events.onHttpComplete(response);
} catch (SocketTimeoutException e) {
events.onHttpError("HTTP " + method + " to " + url + " timeout");
} catch (IOException e) {
events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
}
}
// Return the contents of an InputStream as a String.
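  // Using "\\A" (start of input) as the delimiter makes the Scanner return the
  // entire stream as a single token.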
private static String drainStream(InputStream in) {
Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}
}

@@ -0,0 +1,124 @@
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
from optparse import OptionParser
import random
import string
import subprocess
import sys
import time
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
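# Example invocation (illustrative; the paths and serial below are
# placeholders, and this script must run under monkeyrunner rather than a
# plain Python interpreter):
#   monkeyrunner <this script> --devname <device serial> \
#       --videoout /sdcard/out.yuv --videoout_width 640 --videoout_height 480 \
#       --call_length 10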
def main():
parser = OptionParser()
parser.add_option('--devname', dest='devname', help='The device id')
parser.add_option(
'--videooutsave',
dest='videooutsave',
help='The path where to save the video out file on local computer')
parser.add_option(
'--videoout',
dest='videoout',
help='The path where to put the video out file')
parser.add_option(
'--videoout_width',
dest='videoout_width',
type='int',
help='The width for the video out file')
parser.add_option(
'--videoout_height',
dest='videoout_height',
type='int',
help='The height for the video out file')
parser.add_option(
'--videoin',
dest='videoin',
help='The path where to read input file instead of camera')
parser.add_option(
'--call_length',
dest='call_length',
type='int',
help='The length of the call')
(options, args) = parser.parse_args()
print (options, args)
devname = options.devname
videoin = options.videoin
videoout = options.videoout
videoout_width = options.videoout_width
videoout_height = options.videoout_height
videooutsave = options.videooutsave
call_length = options.call_length or 10
room = ''.join(random.choice(string.ascii_letters + string.digits)
for _ in range(8))
# Delete output video file.
if videoout:
subprocess.check_call(['adb', '-s', devname, 'shell', 'rm', '-f',
videoout])
device = MonkeyRunner.waitForConnection(2, devname)
extras = {
'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True,
'org.appspot.apprtc.AUDIOCODEC': 'OPUS',
'org.appspot.apprtc.LOOPBACK': True,
'org.appspot.apprtc.VIDEOCODEC': 'VP8',
'org.appspot.apprtc.CAPTURETOTEXTURE': False,
'org.appspot.apprtc.CAMERA2': False,
'org.appspot.apprtc.ROOMID': room}
if videoin:
extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin})
if videoout:
extras.update({
'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE': videoout,
'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH': videoout_width,
'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT': videoout_height})
print extras
device.startActivity(data='https://appr.tc',
action='android.intent.action.VIEW',
component='org.appspot.apprtc/.ConnectActivity', extras=extras)
print 'Running a call for %d seconds' % call_length
for _ in xrange(call_length):
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(1)
print '\nEnding call.'
# Press back to end the call. Will end on both sides.
device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)
if videooutsave:
time.sleep(2)
subprocess.check_call(['adb', '-s', devname, 'pull',
videoout, videooutsave])
if __name__ == '__main__':
main()

@@ -0,0 +1,15 @@
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
if (is_android) {
import("//build/config/android/rules.gni")
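  # Wraps the prebuilt AutobahnAndroid WebSocket jar so that other GN targets
  # can depend on it as ":autobanh_java".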
android_java_prebuilt("autobanh_java") {
jar_path = "lib/autobanh.jar"
}
}

@@ -0,0 +1,177 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Cameron Lowell Palmer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,3 @@
AutobahnAndroid
Copyright 2011,2012 Tavendo GmbH. Licensed under Apache 2.0
This product includes software developed at Tavendo GmbH http://www.tavendo.de

Binary data
third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar vendored Normal file

Binary file not shown.

1
third_party/libwebrtc/examples/androidjunit/OWNERS vendored Normal file

@@ -0,0 +1 @@
sakal@webrtc.org

8
third_party/libwebrtc/examples/androidjunit/README vendored Normal file

@@ -0,0 +1,8 @@
This directory contains example JUnit tests for Android AppRTCMobile.
Many of these tests utilize Robolectric to mock Android classes.
To compile:
ninja -C out/Debug android_examples_junit_tests
To run:
out/Debug/bin/run_android_examples_junit_tests
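The tests follow this Robolectric pattern (a minimal sketch; ExampleTest is a
placeholder name, see the real tests in this directory):

  @RunWith(LocalRobolectricTestRunner.class)
  @Config(manifest = Config.NONE)
  public class ExampleTest {
    @Before
    public void setUp() {
      // Route Robolectric logging to stdout, as the tests here do.
      ShadowLog.stream = System.out;
    }
  }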

@@ -0,0 +1,268 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothHeadset;
import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.util.Log;
import java.util.ArrayList;
import java.util.List;
import org.appspot.apprtc.AppRTCBluetoothManager.State;
import org.chromium.testing.local.LocalRobolectricTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowLog;
/**
* Verifies basic behavior of the AppRTCBluetoothManager class.
 * Note that the test object takes an AppRTCAudioManager (injected in the
 * constructor), but a mocked version is supplied here. Hence, the parts
 * "driven" by the AppRTC audio manager are not covered by this test.
*/
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class BluetoothManagerTest {
private static final String TAG = "BluetoothManagerTest";
private static final String BLUETOOTH_TEST_DEVICE_NAME = "BluetoothTestDevice";
private BroadcastReceiver bluetoothHeadsetStateReceiver;
private BluetoothProfile.ServiceListener bluetoothServiceListener;
private BluetoothHeadset mockedBluetoothHeadset;
private BluetoothDevice mockedBluetoothDevice;
private List<BluetoothDevice> mockedBluetoothDeviceList;
private AppRTCBluetoothManager bluetoothManager;
private AppRTCAudioManager mockedAppRtcAudioManager;
private AudioManager mockedAudioManager;
private Context context;
@Before
public void setUp() {
ShadowLog.stream = System.out;
context = RuntimeEnvironment.application;
mockedAppRtcAudioManager = mock(AppRTCAudioManager.class);
mockedAudioManager = mock(AudioManager.class);
mockedBluetoothHeadset = mock(BluetoothHeadset.class);
mockedBluetoothDevice = mock(BluetoothDevice.class);
mockedBluetoothDeviceList = new ArrayList<BluetoothDevice>();
// Simulate that bluetooth SCO audio is available by default.
when(mockedAudioManager.isBluetoothScoAvailableOffCall()).thenReturn(true);
// Create the test object and override protected methods for this test.
bluetoothManager = new AppRTCBluetoothManager(context, mockedAppRtcAudioManager) {
@Override
protected AudioManager getAudioManager(Context context) {
Log.d(TAG, "getAudioManager");
return mockedAudioManager;
}
@Override
protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
Log.d(TAG, "registerReceiver");
if (filter.hasAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)
&& filter.hasAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
// Gives access to the real broadcast receiver so the test can use it.
bluetoothHeadsetStateReceiver = receiver;
}
}
@Override
protected void unregisterReceiver(BroadcastReceiver receiver) {
Log.d(TAG, "unregisterReceiver");
if (receiver == bluetoothHeadsetStateReceiver) {
bluetoothHeadsetStateReceiver = null;
}
}
@Override
protected boolean getBluetoothProfileProxy(
Context context, BluetoothProfile.ServiceListener listener, int profile) {
Log.d(TAG, "getBluetoothProfileProxy");
if (profile == BluetoothProfile.HEADSET) {
// Allows the test to access the real Bluetooth service listener object.
bluetoothServiceListener = listener;
}
return true;
}
@Override
protected boolean hasPermission(Context context, String permission) {
Log.d(TAG, "hasPermission(" + permission + ")");
// Ensure that the client asks for Bluetooth permission.
return android.Manifest.permission.BLUETOOTH.equals(permission);
}
@Override
protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
// Do nothing in tests. No need to mock BluetoothAdapter.
}
};
}
// Verify that Bluetooth service listener for headset profile is properly initialized.
@Test
public void testBluetoothServiceListenerInitialized() {
bluetoothManager.start();
assertNotNull(bluetoothServiceListener);
verify(mockedAppRtcAudioManager, never()).updateAudioDeviceState();
}
// Verify that broadcast receivers for Bluetooth SCO audio state and Bluetooth headset state
// are properly registered and unregistered.
@Test
public void testBluetoothBroadcastReceiversAreRegistered() {
bluetoothManager.start();
assertNotNull(bluetoothHeadsetStateReceiver);
bluetoothManager.stop();
assertNull(bluetoothHeadsetStateReceiver);
}
// Verify that the Bluetooth manager starts and stops with correct states.
@Test
public void testBluetoothDefaultStartStopStates() {
bluetoothManager.start();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
bluetoothManager.stop();
assertEquals(bluetoothManager.getState(), State.UNINITIALIZED);
}
// Verify correct state after receiving BluetoothServiceListener.onServiceConnected()
// when no BT device is enabled.
@Test
public void testBluetoothServiceListenerConnectedWithNoHeadset() {
bluetoothManager.start();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
simulateBluetoothServiceConnectedWithNoConnectedHeadset();
verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
}
// Verify correct state after receiving BluetoothServiceListener.onServiceConnected()
// when one emulated (test) BT device is enabled. Android does not support more than
// one connected BT headset.
@Test
public void testBluetoothServiceListenerConnectedWithHeadset() {
bluetoothManager.start();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
simulateBluetoothServiceConnectedWithConnectedHeadset();
verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState();
assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE);
}
// Verify correct state after receiving BluetoothProfile.ServiceListener.onServiceDisconnected().
@Test
public void testBluetoothServiceListenerDisconnected() {
bluetoothManager.start();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
simulateBluetoothServiceDisconnected();
verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
}
// Verify correct state after BluetoothServiceListener.onServiceConnected() and
// the intent indicating that the headset is actually connected. Both these callbacks
  // result in calls to updateAudioDeviceState() on the AppRTC audio manager.
// No BT SCO is enabled here to keep the test limited.
@Test
public void testBluetoothHeadsetConnected() {
bluetoothManager.start();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
simulateBluetoothServiceConnectedWithConnectedHeadset();
simulateBluetoothHeadsetConnected();
verify(mockedAppRtcAudioManager, times(2)).updateAudioDeviceState();
assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE);
}
// Verify correct state sequence for a case when a BT headset is available,
// followed by BT SCO audio being enabled and then stopped.
@Test
public void testBluetoothScoAudioStartAndStop() {
bluetoothManager.start();
assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
simulateBluetoothServiceConnectedWithConnectedHeadset();
assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE);
bluetoothManager.startScoAudio();
assertEquals(bluetoothManager.getState(), State.SCO_CONNECTING);
simulateBluetoothScoConnectionConnected();
assertEquals(bluetoothManager.getState(), State.SCO_CONNECTED);
bluetoothManager.stopScoAudio();
simulateBluetoothScoConnectionDisconnected();
assertEquals(bluetoothManager.getState(), State.SCO_DISCONNECTING);
bluetoothManager.stop();
assertEquals(bluetoothManager.getState(), State.UNINITIALIZED);
verify(mockedAppRtcAudioManager, times(3)).updateAudioDeviceState();
}
/**
* Private helper methods.
*/
private void simulateBluetoothServiceConnectedWithNoConnectedHeadset() {
mockedBluetoothDeviceList.clear();
when(mockedBluetoothHeadset.getConnectedDevices()).thenReturn(mockedBluetoothDeviceList);
bluetoothServiceListener.onServiceConnected(BluetoothProfile.HEADSET, mockedBluetoothHeadset);
// In real life, the AppRTC audio manager makes this call.
bluetoothManager.updateDevice();
}
private void simulateBluetoothServiceConnectedWithConnectedHeadset() {
mockedBluetoothDeviceList.clear();
mockedBluetoothDeviceList.add(mockedBluetoothDevice);
when(mockedBluetoothHeadset.getConnectedDevices()).thenReturn(mockedBluetoothDeviceList);
when(mockedBluetoothDevice.getName()).thenReturn(BLUETOOTH_TEST_DEVICE_NAME);
bluetoothServiceListener.onServiceConnected(BluetoothProfile.HEADSET, mockedBluetoothHeadset);
// In real life, the AppRTC audio manager makes this call.
bluetoothManager.updateDevice();
}
private void simulateBluetoothServiceDisconnected() {
bluetoothServiceListener.onServiceDisconnected(BluetoothProfile.HEADSET);
}
private void simulateBluetoothHeadsetConnected() {
Intent intent = new Intent();
intent.setAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
intent.putExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_CONNECTED);
bluetoothHeadsetStateReceiver.onReceive(context, intent);
}
private void simulateBluetoothScoConnectionConnected() {
Intent intent = new Intent();
intent.setAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
intent.putExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_CONNECTED);
bluetoothHeadsetStateReceiver.onReceive(context, intent);
}
private void simulateBluetoothScoConnectionDisconnected() {
Intent intent = new Intent();
intent.setAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
intent.putExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
bluetoothHeadsetStateReceiver.onReceive(context, intent);
}
}

@@ -0,0 +1,155 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import org.chromium.testing.local.LocalRobolectricTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowLog;
import org.webrtc.IceCandidate;
import org.webrtc.SessionDescription;
/**
 * Test for DirectRTCClient. The test is intentionally simple and only checks
 * the overall sanity of the class's behaviour.
*/
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class DirectRTCClientTest {
private static final String ROOM_URL = "";
private static final boolean LOOPBACK = false;
private static final String DUMMY_SDP_MID = "sdpMid";
private static final String DUMMY_SDP = "sdp";
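  // SERVER_WAIT gives the server time (in ms) to start listening before the
  // client connects; NETWORK_TIMEOUT bounds how long we wait for callbacks.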
public static final int SERVER_WAIT = 100;
public static final int NETWORK_TIMEOUT = 1000;
private DirectRTCClient client;
private DirectRTCClient server;
AppRTCClient.SignalingEvents clientEvents;
AppRTCClient.SignalingEvents serverEvents;
@Before
public void setUp() {
ShadowLog.stream = System.out;
clientEvents = mock(AppRTCClient.SignalingEvents.class);
serverEvents = mock(AppRTCClient.SignalingEvents.class);
client = new DirectRTCClient(clientEvents);
server = new DirectRTCClient(serverEvents);
}
@Test
public void testValidIpPattern() {
// Strings that should match the pattern.
// clang-format off
final String[] ipAddresses = new String[] {
"0.0.0.0",
"127.0.0.1",
"192.168.0.1",
"0.0.0.0:8888",
"127.0.0.1:8888",
"192.168.0.1:8888",
"::",
"::1",
"2001:0db8:85a3:0000:0000:8a2e:0370:7946",
"[::]",
"[::1]",
"[2001:0db8:85a3:0000:0000:8a2e:0370:7946]",
"[::]:8888",
"[::1]:8888",
"[2001:0db8:85a3:0000:0000:8a2e:0370:7946]:8888"
};
// clang-format on
for (String ip : ipAddresses) {
assertTrue(ip + " didn't match IP_PATTERN even though it should.",
DirectRTCClient.IP_PATTERN.matcher(ip).matches());
}
}
@Test
public void testInvalidIpPattern() {
// Strings that shouldn't match the pattern.
// clang-format off
final String[] invalidIpAddresses = new String[] {
"Hello, World!",
"aaaa",
"1111",
"[hello world]",
"hello:world"
};
// clang-format on
for (String invalidIp : invalidIpAddresses) {
assertFalse(invalidIp + " matched IP_PATTERN even though it shouldn't.",
DirectRTCClient.IP_PATTERN.matcher(invalidIp).matches());
}
}
// TODO(sakal): Replace isNotNull(class) with isNotNull() once Java 8 is used.
@SuppressWarnings("deprecation")
@Test
public void testDirectRTCClient() {
server.connectToRoom(new AppRTCClient.RoomConnectionParameters(ROOM_URL, "0.0.0.0", LOOPBACK));
try {
Thread.sleep(SERVER_WAIT);
} catch (InterruptedException e) {
fail(e.getMessage());
}
client.connectToRoom(
new AppRTCClient.RoomConnectionParameters(ROOM_URL, "127.0.0.1", LOOPBACK));
verify(serverEvents, timeout(NETWORK_TIMEOUT))
.onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));
SessionDescription offerSdp = new SessionDescription(SessionDescription.Type.OFFER, DUMMY_SDP);
server.sendOfferSdp(offerSdp);
verify(clientEvents, timeout(NETWORK_TIMEOUT))
.onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));
SessionDescription answerSdp =
new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
client.sendAnswerSdp(answerSdp);
verify(serverEvents, timeout(NETWORK_TIMEOUT))
.onRemoteDescription(isNotNull(SessionDescription.class));
IceCandidate candidate = new IceCandidate(DUMMY_SDP_MID, 0, DUMMY_SDP);
server.sendLocalIceCandidate(candidate);
verify(clientEvents, timeout(NETWORK_TIMEOUT))
.onRemoteIceCandidate(isNotNull(IceCandidate.class));
client.sendLocalIceCandidate(candidate);
verify(serverEvents, timeout(NETWORK_TIMEOUT))
.onRemoteIceCandidate(isNotNull(IceCandidate.class));
client.disconnectFromRoom();
verify(clientEvents, timeout(NETWORK_TIMEOUT)).onChannelClose();
verify(serverEvents, timeout(NETWORK_TIMEOUT)).onChannelClose();
verifyNoMoreInteractions(clientEvents);
verifyNoMoreInteractions(serverEvents);
}
}

@@ -0,0 +1,199 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import org.chromium.testing.local.LocalRobolectricTestRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowLog;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class TCPChannelClientTest {
private static final int PORT = 8888;
/**
* How long we wait before trying to connect to the server. Note: was
* previously only 10, which was too short (tests were flaky).
*/
private static final int SERVER_WAIT = 300;
private static final int CONNECT_TIMEOUT = 1000;
private static final int SEND_TIMEOUT = 1000;
private static final int DISCONNECT_TIMEOUT = 1000;
private static final int TERMINATION_TIMEOUT = 1000;
private static final String TEST_MESSAGE_SERVER = "Hello, Server!";
private static final String TEST_MESSAGE_CLIENT = "Hello, Client!";
@Mock TCPChannelClient.TCPChannelEvents serverEvents;
@Mock TCPChannelClient.TCPChannelEvents clientEvents;
private ExecutorService executor;
private TCPChannelClient server;
private TCPChannelClient client;
@Before
public void setUp() {
ShadowLog.stream = System.out;
MockitoAnnotations.initMocks(this);
executor = Executors.newSingleThreadExecutor();
}
@After
public void tearDown() {
verifyNoMoreEvents();
executeAndWait(new Runnable() {
@Override
public void run() {
client.disconnect();
server.disconnect();
}
});
// Stop the executor thread
executor.shutdown();
try {
executor.awaitTermination(TERMINATION_TIMEOUT, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
fail(e.getMessage());
}
}
@Test
public void testConnectIPv4() {
setUpIPv4Server();
try {
Thread.sleep(SERVER_WAIT);
} catch (InterruptedException e) {
fail(e.getMessage());
}
setUpIPv4Client();
verify(serverEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(true);
verify(clientEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(false);
}
@Test
public void testConnectIPv6() {
setUpIPv6Server();
try {
Thread.sleep(SERVER_WAIT);
} catch (InterruptedException e) {
fail(e.getMessage());
}
setUpIPv6Client();
verify(serverEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(true);
verify(clientEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(false);
}
@Test
public void testSendData() {
testConnectIPv4();
executeAndWait(new Runnable() {
@Override
public void run() {
client.send(TEST_MESSAGE_SERVER);
server.send(TEST_MESSAGE_CLIENT);
}
});
verify(serverEvents, timeout(SEND_TIMEOUT)).onTCPMessage(TEST_MESSAGE_SERVER);
verify(clientEvents, timeout(SEND_TIMEOUT)).onTCPMessage(TEST_MESSAGE_CLIENT);
}
@Test
public void testDisconnectServer() {
testConnectIPv4();
executeAndWait(new Runnable() {
@Override
public void run() {
server.disconnect();
}
});
verify(serverEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
verify(clientEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
}
@Test
public void testDisconnectClient() {
testConnectIPv4();
executeAndWait(new Runnable() {
@Override
public void run() {
client.disconnect();
}
});
verify(serverEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
verify(clientEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
}
private void setUpIPv4Server() {
setUpServer("0.0.0.0", PORT);
}
private void setUpIPv4Client() {
setUpClient("127.0.0.1", PORT);
}
private void setUpIPv6Server() {
setUpServer("::", PORT);
}
private void setUpIPv6Client() {
setUpClient("::1", PORT);
}
private void setUpServer(String ip, int port) {
server = new TCPChannelClient(executor, serverEvents, ip, port);
}
private void setUpClient(String ip, int port) {
client = new TCPChannelClient(executor, clientEvents, ip, port);
}
/**
   * Verifies that no more server or client events have been issued.
*/
private void verifyNoMoreEvents() {
verifyNoMoreInteractions(serverEvents);
verifyNoMoreInteractions(clientEvents);
}
/**
   * Queues a runnable and waits for it to be executed by the executor thread.
*/
public void executeAndWait(Runnable runnable) {
try {
executor.submit(runnable).get();
} catch (Exception e) {
fail(e.getMessage());
}
}
}

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.webrtc.examples.androidnativeapi">
<uses-sdk android:minSdkVersion="21" android:targetSdkVersion="27" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
<application
android:allowBackup="true"
android:label="@string/app_name"
android:supportsRtl="true">
<activity android:name=".MainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

81
third_party/libwebrtc/examples/androidnativeapi/BUILD.gn vendored Normal file

@@ -0,0 +1,81 @@
import("//webrtc.gni")
if (is_android) {
rtc_android_apk("androidnativeapi") {
testonly = true
apk_name = "androidnativeapi"
android_manifest = "AndroidManifest.xml"
min_sdk_version = 21
target_sdk_version = 27
sources = [
"java/org/webrtc/examples/androidnativeapi/CallClient.java",
"java/org/webrtc/examples/androidnativeapi/MainActivity.java",
]
deps = [
":resources",
"//modules/audio_device:audio_device_java",
"//rtc_base:base_java",
"//sdk/android:camera_java",
"//sdk/android:surfaceviewrenderer_java",
"//sdk/android:video_api_java",
"//sdk/android:video_java",
"//third_party/android_deps:com_android_support_support_annotations_java",
]
shared_libraries = [ ":examples_androidnativeapi_jni" ]
}
generate_jni("generated_jni") {
testonly = true
sources = [ "java/org/webrtc/examples/androidnativeapi/CallClient.java" ]
namespace = "webrtc_examples"
jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
}
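  # generate_jni emits generated_jni/CallClient_jni.h, which the native
  # library below includes (see jni/android_call_client.cc).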
rtc_shared_library("examples_androidnativeapi_jni") {
testonly = true
sources = [
"jni/android_call_client.cc",
"jni/android_call_client.h",
"jni/onload.cc",
]
suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
configs += [ "//build/config/android:hide_all_but_jni" ]
deps = [
":generated_jni",
"../../api:scoped_refptr",
"../../rtc_base/synchronization:mutex",
"//api:libjingle_peerconnection_api",
"//api/rtc_event_log:rtc_event_log_factory",
"//api/task_queue:default_task_queue_factory",
"//media:rtc_audio_video",
"//media:rtc_internal_video_codecs",
"//media:rtc_media_engine_defaults",
"//modules/utility",
"//pc:libjingle_peerconnection",
"//rtc_base",
"//rtc_base:rtc_base_approved",
"//sdk/android:native_api_base",
"//sdk/android:native_api_jni",
"//sdk/android:native_api_video",
]
}
android_resources("resources") {
testonly = true
custom_package = "org.webrtc.examples.androidnativeapi"
create_srcjar = false
sources = [
"res/layout/activity_main.xml",
"res/values/strings.xml",
]
# Needed for Bazel converter.
resource_dirs = [ "res" ]
assert(resource_dirs != []) # Mark as used.
}
}

5
third_party/libwebrtc/examples/androidnativeapi/DEPS vendored Normal file

@@ -0,0 +1,5 @@
include_rules = [
"+logging/rtc_event_log/rtc_event_log_factory.h",
"+modules/utility/include",
"+sdk/android/native_api",
]

1
third_party/libwebrtc/examples/androidnativeapi/OWNERS vendored Normal file

@@ -0,0 +1 @@
sakal@webrtc.org

@@ -0,0 +1,72 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.examples.androidnativeapi;
import android.content.Context;
import android.os.Handler;
import android.os.HandlerThread;
import org.webrtc.CapturerObserver;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSink;
public class CallClient {
private static final String TAG = "CallClient";
private static final int CAPTURE_WIDTH = 640;
private static final int CAPTURE_HEIGHT = 480;
private static final int CAPTURE_FPS = 30;
private final Context applicationContext;
private final HandlerThread thread;
private final Handler handler;
private long nativeClient;
private SurfaceTextureHelper surfaceTextureHelper;
private VideoCapturer videoCapturer;
public CallClient(Context applicationContext) {
this.applicationContext = applicationContext;
thread = new HandlerThread(TAG + "Thread");
thread.start();
handler = new Handler(thread.getLooper());
handler.post(() -> { nativeClient = nativeCreateClient(); });
}
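  // Note that nativeClient is only accessed on the handler thread created
  // above, so no extra synchronization is needed.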
public void call(VideoSink localSink, VideoSink remoteSink, VideoCapturer videoCapturer,
SurfaceTextureHelper videoCapturerSurfaceTextureHelper) {
handler.post(() -> {
nativeCall(nativeClient, localSink, remoteSink);
videoCapturer.initialize(videoCapturerSurfaceTextureHelper, applicationContext,
nativeGetJavaVideoCapturerObserver(nativeClient));
videoCapturer.startCapture(CAPTURE_WIDTH, CAPTURE_HEIGHT, CAPTURE_FPS);
});
}
public void hangup() {
handler.post(() -> { nativeHangup(nativeClient); });
}
public void close() {
handler.post(() -> {
nativeDelete(nativeClient);
nativeClient = 0;
});
thread.quitSafely();
}
private static native long nativeCreateClient();
private static native void nativeCall(
long nativeAndroidCallClient, VideoSink localSink, VideoSink remoteSink);
private static native void nativeHangup(long nativeAndroidCallClient);
private static native void nativeDelete(long nativeAndroidCallClient);
private static native CapturerObserver nativeGetJavaVideoCapturerObserver(
long nativeAndroidCallClient);
}

@@ -0,0 +1,120 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.examples.androidnativeapi;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.widget.Button;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.ContextUtils;
import org.webrtc.EglBase;
import org.webrtc.GlRectDrawer;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
public class MainActivity extends Activity {
private @Nullable CallClient callClient;
private @Nullable EglBase eglBase;
private @Nullable SurfaceViewRenderer localRenderer;
private @Nullable SurfaceViewRenderer remoteRenderer;
private @Nullable SurfaceTextureHelper videoCapturerSurfaceTextureHelper;
private @Nullable VideoCapturer videoCapturer;
@Override
protected void onCreate(Bundle savedInstance) {
ContextUtils.initialize(getApplicationContext());
super.onCreate(savedInstance);
setContentView(R.layout.activity_main);
System.loadLibrary("examples_androidnativeapi_jni");
callClient = new CallClient(getApplicationContext());
Button callButton = (Button) findViewById(R.id.call_button);
callButton.setOnClickListener((view) -> {
if (videoCapturer == null) {
videoCapturer = createVideoCapturer(getApplicationContext());
}
callClient.call(
localRenderer, remoteRenderer, videoCapturer, videoCapturerSurfaceTextureHelper);
});
Button hangupButton = (Button) findViewById(R.id.hangup_button);
hangupButton.setOnClickListener((view) -> { hangup(); });
}
@Override
protected void onStart() {
super.onStart();
eglBase = EglBase.create(null /* sharedContext */, EglBase.CONFIG_PLAIN);
localRenderer = (SurfaceViewRenderer) findViewById(R.id.local_renderer);
remoteRenderer = (SurfaceViewRenderer) findViewById(R.id.remote_renderer);
localRenderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */, EglBase.CONFIG_PLAIN,
new GlRectDrawer());
remoteRenderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */,
EglBase.CONFIG_PLAIN, new GlRectDrawer());
videoCapturerSurfaceTextureHelper =
SurfaceTextureHelper.create("VideoCapturerThread", eglBase.getEglBaseContext());
}
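  // The renderers, the SurfaceTextureHelper and the EglBase created here are
  // released again in onStop() below.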
@Override
protected void onStop() {
hangup();
localRenderer.release();
remoteRenderer.release();
videoCapturerSurfaceTextureHelper.dispose();
eglBase.release();
localRenderer = null;
remoteRenderer = null;
videoCapturerSurfaceTextureHelper = null;
eglBase = null;
super.onStop();
}
@Override
protected void onDestroy() {
callClient.close();
callClient = null;
super.onDestroy();
}
private void hangup() {
if (videoCapturer != null) {
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
videoCapturer.dispose();
videoCapturer = null;
}
callClient.hangup();
}
private static VideoCapturer createVideoCapturer(Context context) {
CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
? new Camera2Enumerator(context)
: new Camera1Enumerator();
return enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
}
}

@@ -0,0 +1,292 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "examples/androidnativeapi/jni/android_call_client.h"
#include <memory>
#include <utility>
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "examples/androidnativeapi/generated_jni/CallClient_jni.h"
#include "media/engine/internal_decoder_factory.h"
#include "media/engine/internal_encoder_factory.h"
#include "media/engine/webrtc_media_engine.h"
#include "media/engine/webrtc_media_engine_defaults.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/native_api/video/wrapper.h"
namespace webrtc_examples {
class AndroidCallClient::PCObserver : public webrtc::PeerConnectionObserver {
public:
explicit PCObserver(AndroidCallClient* client);
void OnSignalingChange(
webrtc::PeerConnectionInterface::SignalingState new_state) override;
void OnDataChannel(
rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
void OnRenegotiationNeeded() override;
void OnIceConnectionChange(
webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
void OnIceGatheringChange(
webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
private:
AndroidCallClient* const client_;
};
namespace {
class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
public:
explicit CreateOfferObserver(
rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);
void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
void OnFailure(webrtc::RTCError error) override;
private:
const rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_;
};
class SetRemoteSessionDescriptionObserver
: public webrtc::SetRemoteDescriptionObserverInterface {
public:
void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override;
};
class SetLocalSessionDescriptionObserver
: public webrtc::SetSessionDescriptionObserver {
public:
void OnSuccess() override;
void OnFailure(webrtc::RTCError error) override;
};
} // namespace
AndroidCallClient::AndroidCallClient()
: call_started_(false), pc_observer_(std::make_unique<PCObserver>(this)) {
thread_checker_.Detach();
CreatePeerConnectionFactory();
}
AndroidCallClient::~AndroidCallClient() = default;
void AndroidCallClient::Call(JNIEnv* env,
const webrtc::JavaRef<jobject>& local_sink,
const webrtc::JavaRef<jobject>& remote_sink) {
RTC_DCHECK_RUN_ON(&thread_checker_);
webrtc::MutexLock lock(&pc_mutex_);
if (call_started_) {
RTC_LOG(LS_WARNING) << "Call already started.";
return;
}
call_started_ = true;
local_sink_ = webrtc::JavaToNativeVideoSink(env, local_sink.obj());
remote_sink_ = webrtc::JavaToNativeVideoSink(env, remote_sink.obj());
video_source_ = webrtc::CreateJavaVideoSource(env, signaling_thread_.get(),
/* is_screencast= */ false,
/* align_timestamps= */ true);
CreatePeerConnection();
Connect();
}
void AndroidCallClient::Hangup(JNIEnv* env) {
RTC_DCHECK_RUN_ON(&thread_checker_);
call_started_ = false;
{
webrtc::MutexLock lock(&pc_mutex_);
if (pc_ != nullptr) {
pc_->Close();
pc_ = nullptr;
}
}
local_sink_ = nullptr;
remote_sink_ = nullptr;
video_source_ = nullptr;
}
void AndroidCallClient::Delete(JNIEnv* env) {
RTC_DCHECK_RUN_ON(&thread_checker_);
delete this;
}
webrtc::ScopedJavaLocalRef<jobject>
AndroidCallClient::GetJavaVideoCapturerObserver(JNIEnv* env) {
RTC_DCHECK_RUN_ON(&thread_checker_);
return video_source_->GetJavaVideoCapturerObserver(env);
}
void AndroidCallClient::CreatePeerConnectionFactory() {
network_thread_ = rtc::Thread::CreateWithSocketServer();
network_thread_->SetName("network_thread", nullptr);
RTC_CHECK(network_thread_->Start()) << "Failed to start thread";
worker_thread_ = rtc::Thread::Create();
worker_thread_->SetName("worker_thread", nullptr);
RTC_CHECK(worker_thread_->Start()) << "Failed to start thread";
signaling_thread_ = rtc::Thread::Create();
signaling_thread_->SetName("signaling_thread", nullptr);
RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread";
webrtc::PeerConnectionFactoryDependencies pcf_deps;
pcf_deps.network_thread = network_thread_.get();
pcf_deps.worker_thread = worker_thread_.get();
pcf_deps.signaling_thread = signaling_thread_.get();
pcf_deps.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
pcf_deps.call_factory = webrtc::CreateCallFactory();
pcf_deps.event_log_factory = std::make_unique<webrtc::RtcEventLogFactory>(
pcf_deps.task_queue_factory.get());
cricket::MediaEngineDependencies media_deps;
media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
media_deps.video_encoder_factory =
std::make_unique<webrtc::InternalEncoderFactory>();
media_deps.video_decoder_factory =
std::make_unique<webrtc::InternalDecoderFactory>();
webrtc::SetMediaEngineDefaults(&media_deps);
pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
RTC_LOG(LS_INFO) << "Media engine created: " << pcf_deps.media_engine.get();
pcf_ = CreateModularPeerConnectionFactory(std::move(pcf_deps));
RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_;
}
void AndroidCallClient::CreatePeerConnection() {
webrtc::MutexLock lock(&pc_mutex_);
webrtc::PeerConnectionInterface::RTCConfiguration config;
config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
// DTLS SRTP has to be disabled for loopback to work.
config.enable_dtls_srtp = false;
pc_ = pcf_->CreatePeerConnection(config, nullptr /* port_allocator */,
nullptr /* cert_generator */,
pc_observer_.get());
RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_;
rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
pcf_->CreateVideoTrack("video", video_source_);
local_video_track->AddOrUpdateSink(local_sink_.get(), rtc::VideoSinkWants());
pc_->AddTransceiver(local_video_track);
RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track;
  for (const rtc::scoped_refptr<webrtc::RtpTransceiverInterface>& transceiver :
       pc_->GetTransceivers()) {
    rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track =
        transceiver->receiver()->track();
if (track &&
track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
static_cast<webrtc::VideoTrackInterface*>(track.get())
->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants());
RTC_LOG(LS_INFO) << "Remote video sink set up: " << track;
break;
}
}
}
void AndroidCallClient::Connect() {
webrtc::MutexLock lock(&pc_mutex_);
pc_->CreateOffer(new rtc::RefCountedObject<CreateOfferObserver>(pc_),
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
}
AndroidCallClient::PCObserver::PCObserver(AndroidCallClient* client)
: client_(client) {}
void AndroidCallClient::PCObserver::OnSignalingChange(
webrtc::PeerConnectionInterface::SignalingState new_state) {
RTC_LOG(LS_INFO) << "OnSignalingChange: " << new_state;
}
void AndroidCallClient::PCObserver::OnDataChannel(
rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
RTC_LOG(LS_INFO) << "OnDataChannel";
}
void AndroidCallClient::PCObserver::OnRenegotiationNeeded() {
RTC_LOG(LS_INFO) << "OnRenegotiationNeeded";
}
void AndroidCallClient::PCObserver::OnIceConnectionChange(
webrtc::PeerConnectionInterface::IceConnectionState new_state) {
RTC_LOG(LS_INFO) << "OnIceConnectionChange: " << new_state;
}
void AndroidCallClient::PCObserver::OnIceGatheringChange(
webrtc::PeerConnectionInterface::IceGatheringState new_state) {
RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state;
}
void AndroidCallClient::PCObserver::OnIceCandidate(
const webrtc::IceCandidateInterface* candidate) {
RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
webrtc::MutexLock lock(&client_->pc_mutex_);
RTC_DCHECK(client_->pc_ != nullptr);
client_->pc_->AddIceCandidate(candidate);
}
CreateOfferObserver::CreateOfferObserver(
rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
: pc_(pc) {}
void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
std::string sdp;
desc->ToString(&sdp);
RTC_LOG(LS_INFO) << "Created offer: " << sdp;
// Ownership of desc was transferred to us, now we transfer it forward.
pc_->SetLocalDescription(
new rtc::RefCountedObject<SetLocalSessionDescriptionObserver>(), desc);
// Generate a fake answer.
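  // Parsing our own offer SDP back in as the remote answer makes the
  // connection loop back to this client; this only works because DTLS-SRTP
  // was disabled in the RTCConfiguration.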
std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
pc_->SetRemoteDescription(
std::move(answer),
new rtc::RefCountedObject<SetRemoteSessionDescriptionObserver>());
}
void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
RTC_LOG(LS_INFO) << "Failed to create offer: " << ToString(error.type())
<< ": " << error.message();
}
void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(
webrtc::RTCError error) {
RTC_LOG(LS_INFO) << "Set remote description: " << error.message();
}
void SetLocalSessionDescriptionObserver::OnSuccess() {
RTC_LOG(LS_INFO) << "Set local description success!";
}
void SetLocalSessionDescriptionObserver::OnFailure(webrtc::RTCError error) {
RTC_LOG(LS_INFO) << "Set local description failure: "
<< ToString(error.type()) << ": " << error.message();
}
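// Entry point for the generated JNI glue: returns an owning raw pointer that
// the Java peer later releases via Delete().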
static jlong JNI_CallClient_CreateClient(JNIEnv* env) {
return webrtc::NativeToJavaPointer(new webrtc_examples::AndroidCallClient());
}
} // namespace webrtc_examples

76
third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.h vendored Normal file

@ -0,0 +1,76 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef EXAMPLES_ANDROIDNATIVEAPI_JNI_ANDROID_CALL_CLIENT_H_
#define EXAMPLES_ANDROIDNATIVEAPI_JNI_ANDROID_CALL_CLIENT_H_
#include <jni.h>
#include <memory>
#include <string>
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
#include "sdk/android/native_api/video/video_source.h"
namespace webrtc_examples {
class AndroidCallClient {
public:
AndroidCallClient();
~AndroidCallClient();
void Call(JNIEnv* env,
const webrtc::JavaRef<jobject>& local_sink,
const webrtc::JavaRef<jobject>& remote_sink);
void Hangup(JNIEnv* env);
// A helper method for Java code to delete this object. Calls delete this.
void Delete(JNIEnv* env);
webrtc::ScopedJavaLocalRef<jobject> GetJavaVideoCapturerObserver(JNIEnv* env);
private:
class PCObserver;
void CreatePeerConnectionFactory() RTC_RUN_ON(thread_checker_);
void CreatePeerConnection() RTC_RUN_ON(thread_checker_);
void Connect() RTC_RUN_ON(thread_checker_);
rtc::ThreadChecker thread_checker_;
bool call_started_ RTC_GUARDED_BY(thread_checker_);
const std::unique_ptr<PCObserver> pc_observer_;
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_
RTC_GUARDED_BY(thread_checker_);
std::unique_ptr<rtc::Thread> network_thread_ RTC_GUARDED_BY(thread_checker_);
std::unique_ptr<rtc::Thread> worker_thread_ RTC_GUARDED_BY(thread_checker_);
std::unique_ptr<rtc::Thread> signaling_thread_
RTC_GUARDED_BY(thread_checker_);
std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> local_sink_
RTC_GUARDED_BY(thread_checker_);
std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> remote_sink_
RTC_GUARDED_BY(thread_checker_);
rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> video_source_
RTC_GUARDED_BY(thread_checker_);
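  // pc_ is accessed both from the caller's thread and from PeerConnection
  // observer callbacks, so it is guarded by its own mutex rather than by
  // thread_checker_.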
webrtc::Mutex pc_mutex_;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_
RTC_GUARDED_BY(pc_mutex_);
};
} // namespace webrtc_examples
#endif // EXAMPLES_ANDROIDNATIVEAPI_JNI_ANDROID_CALL_CLIENT_H_

30
third_party/libwebrtc/examples/androidnativeapi/jni/onload.cc vendored Normal file

@ -0,0 +1,30 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/ssl_adapter.h"
#include "sdk/android/native_api/base/init.h"
namespace webrtc_examples {
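// Runs once when the shared library is loaded: cache the JavaVM for the
// native API helpers and initialize the SSL library before any
// PeerConnection is created.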
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
webrtc::InitAndroid(jvm);
webrtc::JVM::Initialize(jvm);
RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
return JNI_VERSION_1_6;
}
extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
}
} // namespace webrtc_examples


@ -0,0 +1,52 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:padding="8dp"
tools:context="org.webrtc.examples.androidnativeapi.MainActivity">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/local_renderer"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:layout_margin="8dp"/>
<org.webrtc.SurfaceViewRenderer
android:id="@+id/remote_renderer"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:layout_margin="8dp"/>
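    <!-- Both renderers use height 0dp with equal layout_weight, so local and
         remote video split the vertical space evenly. -->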
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="48dp"
style="?android:attr/buttonBarStyle">
<Button
android:id="@+id/call_button"
android:text="@string/call_button"
style="?android:attr/buttonBarButtonStyle"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_weight="1"
android:layout_margin="8dp"/>
<Button
android:id="@+id/hangup_button"
android:text="@string/hangup_button"
style="?android:attr/buttonBarButtonStyle"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_weight="1"
android:layout_margin="8dp"/>
</LinearLayout>
</LinearLayout>


@ -0,0 +1,5 @@
<resources>
<string name="app_name">androidnativeapi</string>
<string name="call_button">Call</string>
<string name="hangup_button">Hangup</string>
</resources>

27
third_party/libwebrtc/examples/androidtests/AndroidManifest.xml vendored Normal file

@ -0,0 +1,27 @@
<!--
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
-->
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="org.appspot.apprtc.test">
<uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
<uses-sdk android:minSdkVersion="21" android:targetSdkVersion="21" />
<application>
<uses-library android:name="android.test.runner" />
</application>
<instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
tools:ignore="MissingPrefix"
android:targetPackage="org.appspot.apprtc"
android:label="Tests for AppRTCMobile"/>
</manifest>

1
third_party/libwebrtc/examples/androidtests/OWNERS vendored Normal file

@ -0,0 +1 @@
sakal@webrtc.org

14
third_party/libwebrtc/examples/androidtests/README vendored Normal file

@ -0,0 +1,14 @@
This directory contains an example unit test for Android AppRTCMobile.
Example of building & using the app:
- Build Android AppRTCMobile and AppRTCMobile unit test:
cd <path/to/webrtc>/src
ninja -C out/Debug AppRTCMobile_test_apk
- Install AppRTCMobile and AppRTCMobileTest:
adb install -r out/Debug/apks/AppRTCMobile.apk
adb install -r out/Debug/apks/AppRTCMobileTest.apk
- Run unit tests:
adb shell am instrument -w org.appspot.apprtc.test/android.support.test.runner.AndroidJUnitRunner
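- Run a single test class (the -e class filter is standard Android
  instrumentation; the test class name below is illustrative):
adb shell am instrument -w -e class org.appspot.apprtc.test.PeerConnectionClientTest org.appspot.apprtc.test/android.support.test.runner.AndroidJUnitRunner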

18
third_party/libwebrtc/examples/androidtests/ant.properties vendored Normal file

@ -0,0 +1,18 @@
# This file is used to override default values used by the Ant build system.
#
# This file must be checked into Version Control Systems, as it is
# integral to the build system of your project.
# This file is only used by the Ant script.
# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.
# You can also use it to define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.
tested.project.dir=../android

92
third_party/libwebrtc/examples/androidtests/build.xml vendored Normal file

@ -0,0 +1,92 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="AppRTCMobileTest" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked into
Version Control Systems. -->
<property file="local.properties" />
<!-- The ant.properties file can be created by you. It is only edited by the
'android' tool to add properties to it.
This is the place to change some Ant specific build properties.
Here are some properties you may want to change/update:
source.dir
The name of the source directory. Default is 'src'.
out.dir
The name of the output directory. Default is 'bin'.
For other overridable properties, look at the beginning of the rules
files in the SDK, at tools/ant/build.xml
Properties related to the SDK location or the project target should
be updated using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your
application and should be checked into Version Control Systems.
-->
<property file="ant.properties" />
<!-- if sdk.dir was not set from one of the property file, then
get it from the ANDROID_HOME env var.
This must be done before we load project.properties since
the proguard config can use sdk.dir -->
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
<isset property="env.ANDROID_SDK_ROOT" />
</condition>
<!-- The project.properties file is created and updated by the 'android'
tool, as well as ADT.
This contains project specific properties such as project target, and library
dependencies. Lower level build properties are stored in ant.properties
(or in .classpath for Eclipse projects).
This file is an integral part of the build system for your
application and should be checked into Version Control Systems. -->
<loadproperties srcFile="project.properties" />
<!-- quick check on sdk.dir -->
<fail
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
unless="sdk.dir"
/>
<!--
Import per project custom build rules if present at the root of the project.
This is the place to put custom intermediary targets such as:
-pre-build
-pre-compile
-post-compile (This is typically used for code obfuscation.
Compiled code location: ${out.classes.absolute.dir}
If this is not done in place, override ${out.dex.input.absolute.dir})
-post-package
-post-build
-pre-clean
-->
<import file="custom_rules.xml" optional="true" />
<!-- Import the actual build file.
To customize existing targets, there are two options:
- Customize only one target:
- copy/paste the target into this file, *before* the
<import> task.
- customize it to your needs.
- Customize the whole content of build.xml
- copy/paste the content of the rules files (minus the top node)
into this file, replacing the <import> task.
- customize to your needs.
***********************
****** IMPORTANT ******
***********************
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
in order to avoid having your file be overridden by tools such as "android update project"
-->
<!-- version-tag: 1 -->
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>


@ -0,0 +1,80 @@
#!/usr/bin/env python
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""
This script tests creating an Android Studio project using the
generate_gradle.py script and making a debug build with it.
It expects to be given the WebRTC output build directory as the first
argument; all other arguments are optional.
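Example invocation (paths illustrative):
  gradle_project_test.py out/Android --project_dir /tmp/gradle_project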
"""
import argparse
import logging
import os
import shutil
import subprocess
import sys
import tempfile
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir))
GENERATE_GRADLE_SCRIPT = os.path.join(SRC_DIR,
'build/android/gradle/generate_gradle.py')
GRADLEW_BIN = os.path.join(SCRIPT_DIR, 'third_party/gradle/gradlew')
def _RunCommand(argv, cwd=SRC_DIR, **kwargs):
logging.info('Running %r', argv)
subprocess.check_call(argv, cwd=cwd, **kwargs)
def _ParseArgs():
parser = argparse.ArgumentParser(
description='Test generating Android gradle project.')
parser.add_argument('build_dir_android',
help='The path to the build directory for Android.')
parser.add_argument('--project_dir',
help='A temporary directory to put the output.')
args = parser.parse_args()
return args
def main():
logging.basicConfig(level=logging.INFO)
args = _ParseArgs()
project_dir = args.project_dir
if not project_dir:
project_dir = tempfile.mkdtemp()
output_dir = os.path.abspath(args.build_dir_android)
project_dir = os.path.abspath(project_dir)
try:
env = os.environ.copy()
env['PATH'] = os.pathsep.join([
os.path.join(SRC_DIR, 'third_party', 'depot_tools'), env.get('PATH', '')
])
_RunCommand([GENERATE_GRADLE_SCRIPT, '--output-directory', output_dir,
'--target', '//examples:AppRTCMobile',
'--project-dir', project_dir,
'--use-gradle-process-resources', '--split-projects'],
env=env)
_RunCommand([GRADLEW_BIN, 'assembleDebug'], project_dir)
finally:
# Do not delete temporary directory if user specified it manually.
if not args.project_dir:
shutil.rmtree(project_dir, True)
if __name__ == '__main__':
sys.exit(main())

16
third_party/libwebrtc/examples/androidtests/project.properties vendored Normal file

@ -0,0 +1,16 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-22
java.compilerargs=-Xlint:all -Werror

Some files were not shown because too many files changed in this diff.