summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/examples/androidapp
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /third_party/libwebrtc/examples/androidapp
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/examples/androidapp')
-rw-r--r--third_party/libwebrtc/examples/androidapp/AndroidManifest.xml60
-rw-r--r--third_party/libwebrtc/examples/androidapp/OWNERS2
-rw-r--r--third_party/libwebrtc/examples/androidapp/README23
-rw-r--r--third_party/libwebrtc/examples/androidapp/ant.properties17
-rw-r--r--third_party/libwebrtc/examples/androidapp/build.xml92
-rw-r--r--third_party/libwebrtc/examples/androidapp/project.properties16
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.pngbin0 -> 587 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.pngbin0 -> 663 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.pngbin0 -> 2486 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.pngbin0 -> 461 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.pngbin0 -> 477 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.pngbin0 -> 2502 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.pngbin0 -> 461 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.pngbin0 -> 477 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.pngbin0 -> 1700 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.pngbin0 -> 743 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.pngbin0 -> 761 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.pngbin0 -> 3364 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml34
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml80
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml77
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml27
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml13
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml10
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml4
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values/arrays.xml61
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values/strings.xml224
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml247
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java594
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java532
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java137
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java158
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java962
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java137
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java110
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java666
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java521
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java346
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java102
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java1400
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java143
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java226
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java73
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java317
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java26
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java362
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java85
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java296
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java427
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java47
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java115
-rw-r--r--third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py127
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn15
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE177
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md21
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE3
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jarbin0 -> 45472 bytes
65 files changed, 9112 insertions, 0 deletions
diff --git a/third_party/libwebrtc/examples/androidapp/AndroidManifest.xml b/third_party/libwebrtc/examples/androidapp/AndroidManifest.xml
new file mode 100644
index 0000000000..05f1bd3da3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/AndroidManifest.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="org.appspot.apprtc"
+ android:versionCode="1"
+ android:versionName="1.0">
+
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+ <uses-feature android:glEsVersion="0x00020000" android:required="true" />
+
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.BLUETOOTH" />
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+ <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+
+ <!-- This is a test application that should always be debuggable. -->
+ <application android:label="@string/app_name"
+ android:icon="@drawable/ic_launcher"
+ android:allowBackup="false"
+ android:debuggable="true"
+ android:supportsRtl="false"
+ tools:ignore="HardcodedDebugMode">
+
+ <activity android:name="ConnectActivity"
+ android:label="@string/app_name"
+ android:windowSoftInputMode="adjustPan"
+ android:theme="@style/AppTheme"
+ android:exported="true">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+
+ <intent-filter>
+ <action android:name="android.intent.action.VIEW"/>
+ <category android:name="android.intent.category.DEFAULT"/>
+ <category android:name="android.intent.category.BROWSABLE"/>
+ <data android:scheme="https" android:host="appr.tc"/>
+ <data android:scheme="http" android:host="appr.tc"/>
+ </intent-filter>
+ </activity>
+
+ <activity android:name="SettingsActivity"
+ android:label="@string/settings_name"
+ android:theme="@style/AppTheme">
+ </activity>
+
+ <activity android:name="CallActivity"
+ android:label="@string/app_name"
+ android:screenOrientation="fullUser"
+ android:configChanges="orientation|smallestScreenSize|screenSize|screenLayout"
+ android:theme="@style/CallActivityTheme">
+ </activity>
+ </application>
+</manifest>
diff --git a/third_party/libwebrtc/examples/androidapp/OWNERS b/third_party/libwebrtc/examples/androidapp/OWNERS
new file mode 100644
index 0000000000..109bea2725
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/OWNERS
@@ -0,0 +1,2 @@
+magjed@webrtc.org
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/androidapp/README b/third_party/libwebrtc/examples/androidapp/README
new file mode 100644
index 0000000000..97e609117c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/README
@@ -0,0 +1,23 @@
+This directory contains an example Android client for https://appr.tc
+
+Prerequisites:
+- "Getting the code", "Compiling", and "Using the Bundled Android SDK/NDK"
+ on http://www.webrtc.org/native-code/android
+
+Example of building & using the app:
+
+cd <path/to/webrtc>/src
+ninja -C out/Default AppRTCMobile
+adb install -r out/Default/apks/AppRTCMobile.apk
+
+In desktop chrome, navigate to https://appr.tc and note the r=<NNN> room
+this redirects to or navigate directly to https://appr.tc/r/<NNN> with
+your own room number. Launch AppRTC on the device and add same <NNN> into the room name list.
+
+You can also run application from a command line to connect to the first room in a list:
+adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW
+This should result in the app launching on Android and connecting to the 3-dot-apprtc
+page displayed in the desktop browser.
+To run loopback test execute following command:
+adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW --ez "org.appspot.apprtc.LOOPBACK" true
+
diff --git a/third_party/libwebrtc/examples/androidapp/ant.properties b/third_party/libwebrtc/examples/androidapp/ant.properties
new file mode 100644
index 0000000000..b0971e891e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/ant.properties
@@ -0,0 +1,17 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+# 'source.dir' for the location of your java source folder and
+# 'out.dir' for the location of your output folder.
+
+# You can also use it define how the release builds are signed by declaring
+# the following properties:
+# 'key.store' for the location of your keystore and
+# 'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
diff --git a/third_party/libwebrtc/examples/androidapp/build.xml b/third_party/libwebrtc/examples/androidapp/build.xml
new file mode 100644
index 0000000000..aa1db6db79
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="AppRTCMobile" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+ <!-- if sdk.dir was not set from one of the property file, then
+ get it from the ANDROID_HOME env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/third_party/libwebrtc/examples/androidapp/project.properties b/third_party/libwebrtc/examples/androidapp/project.properties
new file mode 100644
index 0000000000..a6ca533fe3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..22f30d31ca
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..d9436e5248
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png
new file mode 100644
index 0000000000..f01a31a717
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..e4a9ff0a8e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..f5c80f00e7
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png
new file mode 100644
index 0000000000..5492ed770a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..e4a9ff0a8e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..f5c80f00e7
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png
new file mode 100644
index 0000000000..b8b4b0ec4b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..6d90c071d5
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..a773b34208
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png
new file mode 100644
index 0000000000..a3cd45890c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml b/third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml
new file mode 100644
index 0000000000..bf811426f3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!-- tools:ignore is needed because lint thinks this can be replaced with a merge. Replacing this
+ with a merge causes the fullscreen SurfaceView not to be centered. -->
+<FrameLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ tools:ignore="MergeRootFrame">
+
+ <org.webrtc.SurfaceViewRenderer
+ android:id="@+id/fullscreen_video_view"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center" />
+
+ <org.webrtc.SurfaceViewRenderer
+ android:id="@+id/pip_video_view"
+ android:layout_height="144dp"
+ android:layout_width="wrap_content"
+ android:layout_gravity="bottom|end"
+ android:layout_margin="16dp"/>
+
+ <FrameLayout
+ android:id="@+id/call_fragment_container"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent" />
+ <FrameLayout
+ android:id="@+id/hud_fragment_container"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent" />
+
+</FrameLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml b/third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml
new file mode 100644
index 0000000000..017e5cabff
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_margin="16dp"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:orientation="vertical"
+ android:weightSum="1"
+ android:layout_centerHorizontal="true">
+
+ <TextView
+ android:id="@+id/room_edittext_description"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:text="@string/room_description"/>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:gravity="center"
+ android:layout_marginBottom="8dp">
+
+ <!-- TODO(crbug.com/900912): Fix and remove lint ignore -->
+ <EditText
+ tools:ignore="LabelFor,Autofill"
+ android:id="@+id/room_edittext"
+ android:layout_width="0dp"
+ android:layout_height="wrap_content"
+ android:layout_weight="1"
+ android:maxLines="1"
+ android:imeOptions="actionDone"
+ android:inputType="text"/>
+
+ <ImageButton
+ android:id="@+id/connect_button"
+ android:layout_width="48dp"
+ android:layout_height="48dp"
+ android:contentDescription="@string/connect_description"
+ android:background="@android:drawable/sym_action_call" />
+
+ <ImageButton
+ android:id="@+id/add_favorite_button"
+ android:layout_width="48dp"
+ android:layout_height="48dp"
+ android:contentDescription="@string/add_favorite_description"
+ android:background="@android:drawable/ic_input_add" />
+ </LinearLayout>
+
+ <TextView
+ android:id="@+id/room_listview_description"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_marginTop="8dp"
+ android:lines="1"
+ android:maxLines="1"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:text="@string/favorites"
+ android:gravity="center_vertical"/>
+
+ <FrameLayout
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1">
+
+ <ListView
+ android:id="@+id/room_listview"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:drawSelectorOnTop="false" />
+
+ <TextView
+ android:id="@android:id/empty"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:gravity="center"
+ android:text="@string/no_favorites" />
+ </FrameLayout>
+</LinearLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml
new file mode 100644
index 0000000000..90b1e9ca0e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml
@@ -0,0 +1,77 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<RelativeLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <TextView
+ android:id="@+id/contact_name_call"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_centerHorizontal="true"
+ android:layout_above="@+id/buttons_call_container"
+ android:textSize="24sp"
+ android:layout_margin="8dp"/>
+
+ <LinearLayout
+ android:id="@+id/buttons_call_container"
+ android:orientation="horizontal"
+ android:layout_above="@+id/capture_format_text_call"
+ android:layout_alignWithParentIfMissing="true"
+ android:layout_marginBottom="32dp"
+ android:layout_centerHorizontal="true"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+
+ <ImageButton
+ android:id="@+id/button_call_disconnect"
+ android:background="@drawable/disconnect"
+ android:contentDescription="@string/disconnect_call"
+ android:layout_marginEnd="16dp"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <ImageButton
+ android:id="@+id/button_call_switch_camera"
+ android:background="@android:drawable/ic_menu_camera"
+ android:contentDescription="@string/switch_camera"
+ android:layout_marginEnd="8dp"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <ImageButton
+ android:id="@+id/button_call_scaling_mode"
+ android:background="@drawable/ic_action_return_from_full_screen"
+ android:contentDescription="@string/disconnect_call"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <ImageButton
+ android:id="@+id/button_call_toggle_mic"
+ android:background="@android:drawable/ic_btn_speak_now"
+ android:contentDescription="@string/toggle_mic"
+ android:layout_marginEnd="8dp"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+ </LinearLayout>
+
+ <TextView
+ android:id="@+id/capture_format_text_call"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_centerHorizontal="true"
+ android:layout_above="@+id/capture_format_slider_call"
+ android:textSize="16sp"
+ android:text="@string/capture_format_change_text"/>
+
+ <SeekBar
+ android:id="@+id/capture_format_slider_call"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_centerHorizontal="true"
+ android:layout_alignParentBottom="true"
+ android:progress="50"
+ android:layout_margin="8dp"/>
+
+</RelativeLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml
new file mode 100644
index 0000000000..483e7ba456
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<RelativeLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <ImageButton
+ android:id="@+id/button_toggle_debug"
+ android:background="@android:drawable/ic_menu_info_details"
+ android:contentDescription="@string/toggle_debug"
+ android:layout_alignParentBottom="true"
+ android:layout_alignParentStart="true"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <TextView
+ android:id="@+id/hud_stat_call"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_alignParentEnd="true"
+ android:textStyle="bold"
+ android:textColor="#C000FF00"
+ android:textSize="12sp"
+ android:layout_margin="8dp"/>
+
+</RelativeLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml b/third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml
new file mode 100644
index 0000000000..a723f54941
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml
@@ -0,0 +1,13 @@
+<menu xmlns:android="http://schemas.android.com/apk/res/android">
+ <item
+ android:id="@+id/action_loopback"
+ android:icon="@drawable/ic_loopback_call"
+ android:showAsAction="always"
+ android:title="@string/action_loopback"/>
+ <item
+ android:id="@+id/action_settings"
+ android:orderInCategory="100"
+ android:icon="@android:drawable/ic_menu_preferences"
+ android:showAsAction="ifRoom"
+ android:title="@string/action_settings"/>
+</menu>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml b/third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml
new file mode 100644
index 0000000000..969b5012e9
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="AppTheme" parent="android:Theme.Holo" />
+
+ <style name="CallActivityTheme" parent="android:Theme.Black">
+ <item name="android:windowActionBar">false</item>
+ <item name="android:windowFullscreen">true</item>
+ <item name="android:windowNoTitle">true</item>
+ </style>
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml b/third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml
new file mode 100644
index 0000000000..b19af7e38f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="AppTheme" parent="android:Theme.Material" />
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values/arrays.xml b/third_party/libwebrtc/examples/androidapp/res/values/arrays.xml
new file mode 100644
index 0000000000..4a2948c875
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values/arrays.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string-array name="videoResolutions">
+ <item>Default</item>
+ <item>4K (3840 x 2160)</item>
+ <item>Full HD (1920 x 1080)</item>
+ <item>HD (1280 x 720)</item>
+ <item>VGA (640 x 480)</item>
+ <item>QVGA (320 x 240)</item>
+ </string-array>
+
+ <string-array name="videoResolutionsValues">
+ <item>Default</item>
+ <item>3840 x 2160</item>
+ <item>1920 x 1080</item>
+ <item>1280 x 720</item>
+ <item>640 x 480</item>
+ <item>320 x 240</item>
+ </string-array>
+
+ <string-array name="cameraFps">
+ <item>Default</item>
+ <item>30 fps</item>
+ <item>15 fps</item>
+ </string-array>
+
+ <string-array name="startBitrate">
+ <item>Default</item>
+ <item>Manual</item>
+ </string-array>
+
+ <string-array name="videoCodecs">
+ <item>VP8</item>
+ <item>VP9</item>
+ <item>H264 Baseline</item>
+ <item>H264 High</item>
+ <item>AV1</item>
+ </string-array>
+
+ <string-array name="audioCodecs">
+ <item>OPUS</item>
+ <item>ISAC</item>
+ </string-array>
+
+ <string-array name="speakerphone">
+ <item>Auto (proximity sensor)</item>
+ <item>Enabled</item>
+ <item>Disabled</item>
+ </string-array>
+
+ <string-array name="speakerphoneValues">
+ <item>auto</item>
+ <item>true</item>
+ <item>false</item>
+ </string-array>
+
+ <string-array name="roomListContextMenu">
+ <item>Remove favorite</item>
+ </string-array>
+
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values/strings.xml b/third_party/libwebrtc/examples/androidapp/res/values/strings.xml
new file mode 100644
index 0000000000..814966f200
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values/strings.xml
@@ -0,0 +1,224 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="app_name" translatable="false">AppRTC</string>
+ <string name="settings_name" translatable="false">AppRTC Settings</string>
+ <string name="disconnect_call">Disconnect Call</string>
+ <string name="room_description">
+ Please enter a room name. Room names are shared with everyone, so think
+ of something unique and send it to a friend.
+ </string>
+ <string name="favorites">Favorites</string>
+ <string name="no_favorites">No favorites</string>
+ <string name="invalid_url_title">Invalid URL</string>
+ <string name="invalid_url_text">The URL or room name you entered resulted in an invalid URL: %1$s
+ </string>
+ <string name="channel_error_title">Connection error</string>
+ <string name="connecting_to">Connecting to: %1$s</string>
+ <string name="missing_url">FATAL ERROR: Missing URL to connect to.</string>
+ <string name="camera2_texture_only_error">Camera2 only supports capturing to texture. Either disable Camera2 or enable capturing to texture in the options.</string>
+ <string name="ok">OK</string>
+ <string name="switch_camera">Switch front/back camera</string>
+ <string name="capture_format_change_text">Slide to change capture format</string>
+ <string name="muted">Muted</string>
+ <string name="toggle_debug">Toggle debug view</string>
+ <string name="toggle_mic">Toggle microphone on/off</string>
+ <string name="action_settings">Settings</string>
+ <string name="action_loopback">Loopback connection</string>
+ <string name="connect_description">Connect to the room</string>
+ <string name="add_favorite_description">Add favorite</string>
+ <string name="format_description">%1$dx%2$d @ %3$d fps</string>
+ <string name="missing_permissions_try_again">The application is missing permissions. It might not work correctly. Do you want to try again?</string>
+ <string name="yes">Yes</string>
+ <string name="no">No</string>
+
+ <!-- Settings strings. -->
+ <string name="pref_room_key">room_preference</string>
+ <string name="pref_room_list_key">room_list_preference</string>
+
+ <string name="pref_videosettings_key">video_settings_key</string>
+ <string name="pref_videosettings_title">WebRTC video settings.</string>
+
+ <string name="pref_videocall_key">videocall_preference</string>
+ <string name="pref_videocall_title">Video call.</string>
+ <string name="pref_videocall_dlg">Enable video in a call.</string>
+ <string name="pref_videocall_default">true</string>
+
+ <string name="pref_screencapture_key">screencapture_preference</string>
+ <string name="pref_screencapture_title">Use screencapture.</string>
+ <string name="pref_screencapture_default">false</string>
+
+ <string name="pref_camera2_key">camera2_preference</string>
+ <string name="pref_camera2_title">Use Camera2.</string>
+ <string name="pref_camera2_default">true</string>
+ <string name="pref_camera2_not_supported">Not supported on this device.</string>
+
+ <string name="pref_resolution_key">resolution_preference</string>
+ <string name="pref_resolution_title">Video resolution.</string>
+ <string name="pref_resolution_dlg">Enter AppRTC local video resolution.</string>
+ <string name="pref_resolution_default">Default</string>
+
+ <string name="pref_fps_key">fps_preference</string>
+ <string name="pref_fps_title">Camera fps.</string>
+ <string name="pref_fps_dlg">Enter local camera fps.</string>
+ <string name="pref_fps_default">Default</string>
+
+ <string name="pref_capturequalityslider_key">capturequalityslider_preference</string>
+ <string name="pref_capturequalityslider_title">Capture quality slider.</string>
+ <string name="pref_capturequalityslider_dlg">Enable slider for changing capture quality.</string>
+ <string name="pref_capturequalityslider_default">false</string>
+
+ <string name="pref_maxvideobitrate_key">maxvideobitrate_preference</string>
+ <string name="pref_maxvideobitrate_title">Maximum video bitrate setting.</string>
+ <string name="pref_maxvideobitrate_dlg">Maximum video bitrate setting.</string>
+ <string name="pref_maxvideobitrate_default">Default</string>
+
+ <string name="pref_maxvideobitratevalue_key">maxvideobitratevalue_preference</string>
+ <string name="pref_maxvideobitratevalue_title">Video encoder maximum bitrate.</string>
+ <string name="pref_maxvideobitratevalue_dlg">Enter video encoder maximum bitrate in kbps.</string>
+ <string name="pref_maxvideobitratevalue_default">1700</string>
+
+ <string name="pref_videocodec_key">videocodec_preference</string>
+ <string name="pref_videocodec_title">Default video codec.</string>
+ <string name="pref_videocodec_dlg">Select default video codec.</string>
+ <string name="pref_videocodec_default">VP8</string>
+
+ <string name="pref_hwcodec_key">hwcodec_preference</string>
+ <string name="pref_hwcodec_title">Video codec hardware acceleration.</string>
+ <string name="pref_hwcodec_dlg">Use hardware accelerated video codec (if available).</string>
+ <string name="pref_hwcodec_default">true</string>
+
+ <string name="pref_capturetotexture_key">capturetotexture_preference</string>
+ <string name="pref_capturetotexture_title">Video capture to surface texture.</string>
+ <string name="pref_capturetotexture_dlg">Capture video to textures (if available).</string>
+ <string name="pref_capturetotexture_default">true</string>
+
+ <string name="pref_flexfec_key">flexfec_preference</string>
+ <string name="pref_flexfec_title">Codec-agnostic Flexible FEC.</string>
+ <string name="pref_flexfec_dlg">Enable FlexFEC.</string>
+ <string name="pref_flexfec_default">false</string>
+
+ <string name="pref_value_enabled">Enabled</string>
+ <string name="pref_value_disabled">Disabled</string>
+
+ <string name="pref_audiosettings_key">audio_settings_key</string>
+ <string name="pref_audiosettings_title">WebRTC audio settings.</string>
+
+ <string name="pref_startaudiobitrate_key">startaudiobitrate_preference</string>
+ <string name="pref_startaudiobitrate_title">Audio bitrate setting.</string>
+ <string name="pref_startaudiobitrate_dlg">Audio bitrate setting.</string>
+ <string name="pref_startaudiobitrate_default">Default</string>
+
+ <string name="pref_startaudiobitratevalue_key">startaudiobitratevalue_preference</string>
+ <string name="pref_startaudiobitratevalue_title">Audio codec bitrate.</string>
+ <string name="pref_startaudiobitratevalue_dlg">Enter audio codec bitrate in kbps.</string>
+ <string name="pref_startaudiobitratevalue_default">32</string>
+
+ <string name="pref_audiocodec_key">audiocodec_preference</string>
+ <string name="pref_audiocodec_title">Default audio codec.</string>
+ <string name="pref_audiocodec_dlg">Select default audio codec.</string>
+ <string name="pref_audiocodec_default">OPUS</string>
+
+ <string name="pref_noaudioprocessing_key">audioprocessing_preference</string>
+ <string name="pref_noaudioprocessing_title">Disable audio processing.</string>
+ <string name="pref_noaudioprocessing_dlg">Disable audio processing pipeline.</string>
+ <string name="pref_noaudioprocessing_default">false</string>
+
+ <string name="pref_aecdump_key">aecdump_preference</string>
+ <string name="pref_aecdump_title">Create aecdump.</string>
+ <string name="pref_aecdump_dlg">Enable diagnostic audio recordings.</string>
+ <string name="pref_aecdump_default">false</string>
+
+ <string name="pref_enable_save_input_audio_to_file_key">enable_key</string>
+ <string name="pref_enable_save_input_audio_to_file_title">Save input audio to file.</string>
+ <string name="pref_enable_save_input_audio_to_file_dlg">Save input audio to file.</string>
+ <string name="pref_enable_save_input_audio_to_file_default">false</string>
+
+ <string name="pref_opensles_key">opensles_preference</string>
+ <string name="pref_opensles_title">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_dlg">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_default">false</string>
+
+ <string name="pref_disable_built_in_aec_key">disable_built_in_aec_preference</string>
+ <string name="pref_disable_built_in_aec_title">Disable hardware AEC.</string>
+ <string name="pref_disable_built_in_aec_dlg">Disable hardware AEC.</string>
+ <string name="pref_disable_built_in_aec_default">false</string>
+ <string name="pref_built_in_aec_not_available">Hardware AEC is not available</string>
+
+ <string name="pref_disable_built_in_agc_key">disable_built_in_agc_preference</string>
+ <string name="pref_disable_built_in_agc_title">Disable hardware AGC.</string>
+ <string name="pref_disable_built_in_agc_dlg">Disable hardware AGC.</string>
+ <string name="pref_disable_built_in_agc_default">false</string>
+ <string name="pref_built_in_agc_not_available">Hardware AGC is not available</string>
+
+ <string name="pref_disable_built_in_ns_key">disable_built_in_ns_preference</string>
+ <string name="pref_disable_built_in_ns_title">Disable hardware NS.</string>
+ <string name="pref_disable_built_in_ns_dlg">Disable hardware NS.</string>
+ <string name="pref_disable_built_in_ns_default">false</string>
+ <string name="pref_built_in_ns_not_available">Hardware NS is not available</string>
+
+ <string name="pref_disable_webrtc_agc_and_hpf_key">disable_webrtc_agc_and_hpf_preference</string>
+ <string name="pref_disable_webrtc_agc_and_hpf_title">Disable WebRTC AGC and HPF.</string>
+ <string name="pref_disable_webrtc_agc_default">false</string>
+
+ <string name="pref_speakerphone_key">speakerphone_preference</string>
+ <string name="pref_speakerphone_title">Speakerphone.</string>
+ <string name="pref_speakerphone_dlg">Speakerphone.</string>
+ <string name="pref_speakerphone_default">auto</string>
+
+ <string name="pref_datasettings_key">data_settings_key</string>
+ <string name="pref_datasettings_title">WebRTC data channel settings.</string>
+
+ <string name="pref_enable_datachannel_key">enable_datachannel_preference</string>
+ <string name="pref_enable_datachannel_title">Enable datachannel.</string>
+ <string name="pref_enable_datachannel_default" translatable="false">true</string>
+
+ <string name="pref_ordered_key">ordered_preference</string>
+ <string name="pref_ordered_title">Order messages.</string>
+ <string name="pref_ordered_default" translatable="false">true</string>
+
+ <string name="pref_data_protocol_key">Subprotocol</string>
+ <string name="pref_data_protocol_title">Subprotocol.</string>
+ <string name="pref_data_protocol_dlg">Enter subprotocol.</string>
+ <string name="pref_data_protocol_default" translatable="false"></string>
+
+ <string name="pref_negotiated_key">negotiated_preference</string>
+ <string name="pref_negotiated_title">Negotiated.</string>
+ <string name="pref_negotiated_default" translatable="false">false</string>
+
+ <string name="pref_max_retransmit_time_ms_key">max_retransmit_time_ms_preference</string>
+ <string name="pref_max_retransmit_time_ms_title">Max delay to retransmit.</string>
+ <string name="pref_max_retransmit_time_ms_dlg">Enter max delay to retransmit (in ms).</string>
+ <string name="pref_max_retransmit_time_ms_default" translatable="false">-1</string>
+
+ <string name="pref_max_retransmits_key">max_retransmits_preference</string>
+ <string name="pref_max_retransmits_title">Max attempts to retransmit.</string>
+ <string name="pref_max_retransmits_dlg">Enter max attempts to retransmit.</string>
+ <string name="pref_max_retransmits_default" translatable="false">-1</string>
+
+ <string name="pref_data_id_key">data_id_preference</string>
+ <string name="pref_data_id_title">Data id.</string>
+ <string name="pref_data_id_dlg">Enter data channel id.</string>
+ <string name="pref_data_id_default" translatable="false">-1</string>
+
+ <string name="pref_miscsettings_key">misc_settings_key</string>
+ <string name="pref_miscsettings_title">Miscellaneous settings.</string>
+
+ <string name="pref_room_server_url_key">room_server_url_preference</string>
+ <string name="pref_room_server_url_title">Room server URL.</string>
+ <string name="pref_room_server_url_dlg">Enter a room server URL.</string>
+ <string name="pref_room_server_url_default" translatable="false">https://appr.tc</string>
+
+ <string name="pref_displayhud_key">displayhud_preference</string>
+ <string name="pref_displayhud_title">Display call statistics.</string>
+ <string name="pref_displayhud_dlg">Display call statistics.</string>
+ <string name="pref_displayhud_default" translatable="false">false</string>
+
+ <string name="pref_tracing_key">tracing_preference</string>
+ <string name="pref_tracing_title">Debug performance tracing.</string>
+ <string name="pref_tracing_dlg">Debug performance tracing.</string>
+ <string name="pref_tracing_default" translatable="false">false</string>
+
+ <string name="pref_enable_rtceventlog_key">enable_rtceventlog_key</string>
+ <string name="pref_enable_rtceventlog_title">Enable RtcEventLog.</string>
+ <string name="pref_enable_rtceventlog_default">false</string>
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml b/third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml
new file mode 100644
index 0000000000..14e74d5c0b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml
@@ -0,0 +1,247 @@
+<?xml version="1.0" encoding="utf-8"?>
+<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
+ <PreferenceCategory
+ android:key="@string/pref_videosettings_key"
+ android:title="@string/pref_videosettings_title">
+
+ <CheckBoxPreference
+ android:key="@string/pref_videocall_key"
+ android:title="@string/pref_videocall_title"
+ android:dialogTitle="@string/pref_videocall_dlg"
+ android:defaultValue="@string/pref_videocall_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_screencapture_key"
+ android:title="@string/pref_screencapture_title"
+ android:defaultValue="@string/pref_screencapture_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_camera2_key"
+ android:title="@string/pref_camera2_title"
+ android:defaultValue="@string/pref_camera2_default" />
+
+ <ListPreference
+ android:key="@string/pref_resolution_key"
+ android:title="@string/pref_resolution_title"
+ android:defaultValue="@string/pref_resolution_default"
+ android:dialogTitle="@string/pref_resolution_dlg"
+ android:entries="@array/videoResolutions"
+ android:entryValues="@array/videoResolutionsValues" />
+
+ <ListPreference
+ android:key="@string/pref_fps_key"
+ android:title="@string/pref_fps_title"
+ android:defaultValue="@string/pref_fps_default"
+ android:dialogTitle="@string/pref_fps_dlg"
+ android:entries="@array/cameraFps"
+ android:entryValues="@array/cameraFps" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_capturequalityslider_key"
+ android:title="@string/pref_capturequalityslider_title"
+ android:dialogTitle="@string/pref_capturequalityslider_dlg"
+ android:defaultValue="@string/pref_capturequalityslider_default" />
+
+ <ListPreference
+ android:key="@string/pref_maxvideobitrate_key"
+ android:title="@string/pref_maxvideobitrate_title"
+ android:defaultValue="@string/pref_maxvideobitrate_default"
+ android:dialogTitle="@string/pref_maxvideobitrate_dlg"
+ android:entries="@array/startBitrate"
+ android:entryValues="@array/startBitrate" />
+
+ <EditTextPreference
+ android:key="@string/pref_maxvideobitratevalue_key"
+ android:title="@string/pref_maxvideobitratevalue_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_maxvideobitratevalue_default"
+ android:dialogTitle="@string/pref_maxvideobitratevalue_dlg" />
+
+ <ListPreference
+ android:key="@string/pref_videocodec_key"
+ android:title="@string/pref_videocodec_title"
+ android:defaultValue="@string/pref_videocodec_default"
+ android:dialogTitle="@string/pref_videocodec_dlg"
+ android:entries="@array/videoCodecs"
+ android:entryValues="@array/videoCodecs" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_hwcodec_key"
+ android:title="@string/pref_hwcodec_title"
+ android:dialogTitle="@string/pref_hwcodec_dlg"
+ android:defaultValue="@string/pref_hwcodec_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_capturetotexture_key"
+ android:title="@string/pref_capturetotexture_title"
+ android:dialogTitle="@string/pref_capturetotexture_dlg"
+ android:defaultValue="@string/pref_capturetotexture_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_flexfec_key"
+ android:title="@string/pref_flexfec_title"
+ android:dialogTitle="@string/pref_flexfec_dlg"
+ android:defaultValue="@string/pref_flexfec_default" />
+ </PreferenceCategory>
+
+ <PreferenceCategory
+ android:key="@string/pref_audiosettings_key"
+ android:title="@string/pref_audiosettings_title">
+
+ <ListPreference
+ android:key="@string/pref_startaudiobitrate_key"
+ android:title="@string/pref_startaudiobitrate_title"
+ android:defaultValue="@string/pref_startaudiobitrate_default"
+ android:dialogTitle="@string/pref_startaudiobitrate_dlg"
+ android:entries="@array/startBitrate"
+ android:entryValues="@array/startBitrate" />
+
+ <EditTextPreference
+ android:key="@string/pref_startaudiobitratevalue_key"
+ android:title="@string/pref_startaudiobitratevalue_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_startaudiobitratevalue_default"
+ android:dialogTitle="@string/pref_startaudiobitratevalue_dlg" />
+
+ <ListPreference
+ android:key="@string/pref_audiocodec_key"
+ android:title="@string/pref_audiocodec_title"
+ android:defaultValue="@string/pref_audiocodec_default"
+ android:dialogTitle="@string/pref_audiocodec_dlg"
+ android:entries="@array/audioCodecs"
+ android:entryValues="@array/audioCodecs" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_noaudioprocessing_key"
+ android:title="@string/pref_noaudioprocessing_title"
+ android:dialogTitle="@string/pref_noaudioprocessing_dlg"
+ android:defaultValue="@string/pref_noaudioprocessing_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_aecdump_key"
+ android:title="@string/pref_aecdump_title"
+ android:dialogTitle="@string/pref_aecdump_dlg"
+ android:defaultValue="@string/pref_aecdump_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_enable_save_input_audio_to_file_key"
+ android:title="@string/pref_enable_save_input_audio_to_file_title"
+ android:dialogTitle="@string/pref_enable_save_input_audio_to_file_dlg"
+ android:defaultValue="@string/pref_enable_save_input_audio_to_file_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_opensles_key"
+ android:title="@string/pref_opensles_title"
+ android:dialogTitle="@string/pref_opensles_dlg"
+ android:defaultValue="@string/pref_opensles_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_built_in_aec_key"
+ android:title="@string/pref_disable_built_in_aec_title"
+ android:dialogTitle="@string/pref_disable_built_in_aec_dlg"
+ android:defaultValue="@string/pref_disable_built_in_aec_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_built_in_agc_key"
+ android:title="@string/pref_disable_built_in_agc_title"
+ android:dialogTitle="@string/pref_disable_built_in_agc_dlg"
+ android:defaultValue="@string/pref_disable_built_in_agc_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_built_in_ns_key"
+ android:title="@string/pref_disable_built_in_ns_title"
+ android:dialogTitle="@string/pref_disable_built_in_ns_dlg"
+ android:defaultValue="@string/pref_disable_built_in_ns_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_webrtc_agc_and_hpf_key"
+ android:title="@string/pref_disable_webrtc_agc_and_hpf_title"
+ android:defaultValue="@string/pref_disable_webrtc_agc_default" />
+
+ <ListPreference
+ android:key="@string/pref_speakerphone_key"
+ android:title="@string/pref_speakerphone_title"
+ android:defaultValue="@string/pref_speakerphone_default"
+ android:dialogTitle="@string/pref_speakerphone_dlg"
+ android:entries="@array/speakerphone"
+ android:entryValues="@array/speakerphoneValues" />
+ </PreferenceCategory>
+
+ <PreferenceCategory
+ android:key="@string/pref_datasettings_key"
+ android:title="@string/pref_datasettings_title">
+
+ <CheckBoxPreference
+ android:key="@string/pref_enable_datachannel_key"
+ android:title="@string/pref_enable_datachannel_title"
+ android:defaultValue="@string/pref_enable_datachannel_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_ordered_key"
+ android:title="@string/pref_ordered_title"
+ android:defaultValue="@string/pref_ordered_default" />
+
+ <EditTextPreference
+ android:key="@string/pref_data_protocol_key"
+ android:title="@string/pref_data_protocol_title"
+ android:inputType="text"
+ android:defaultValue="@string/pref_data_protocol_default"
+ android:dialogTitle="@string/pref_data_protocol_dlg" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_negotiated_key"
+ android:title="@string/pref_negotiated_title"
+ android:defaultValue="@string/pref_negotiated_default" />
+
+ <EditTextPreference
+ android:key="@string/pref_max_retransmit_time_ms_key"
+ android:title="@string/pref_max_retransmit_time_ms_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_max_retransmit_time_ms_default"
+ android:dialogTitle="@string/pref_max_retransmit_time_ms_dlg" />
+
+ <EditTextPreference
+ android:key="@string/pref_max_retransmits_key"
+ android:title="@string/pref_max_retransmits_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_max_retransmits_default"
+ android:dialogTitle="@string/pref_max_retransmits_dlg" />
+
+ <EditTextPreference
+ android:key="@string/pref_data_id_key"
+ android:title="@string/pref_data_id_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_data_id_default"
+ android:dialogTitle="@string/pref_data_id_dlg" />
+ </PreferenceCategory>
+
+ <PreferenceCategory
+ android:key="@string/pref_miscsettings_key"
+ android:title="@string/pref_miscsettings_title">
+
+ <EditTextPreference
+ android:key="@string/pref_room_server_url_key"
+ android:title="@string/pref_room_server_url_title"
+ android:inputType="text"
+ android:defaultValue="@string/pref_room_server_url_default"
+ android:dialogTitle="@string/pref_room_server_url_dlg" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_displayhud_key"
+ android:title="@string/pref_displayhud_title"
+ android:dialogTitle="@string/pref_displayhud_dlg"
+ android:defaultValue="@string/pref_displayhud_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_tracing_key"
+ android:title="@string/pref_tracing_title"
+ android:dialogTitle="@string/pref_tracing_dlg"
+ android:defaultValue="@string/pref_tracing_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_enable_rtceventlog_key"
+ android:title="@string/pref_enable_rtceventlog_title"
+ android:defaultValue="@string/pref_enable_rtceventlog_default"/>
+ </PreferenceCategory>
+
+</PreferenceScreen>
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
new file mode 100644
index 0000000000..2536b131a1
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
@@ -0,0 +1,594 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.SharedPreferences;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.os.Build;
+import android.preference.PreferenceManager;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import org.appspot.apprtc.util.AppRTCUtils;
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCAudioManager manages all audio related parts of the AppRTC demo.
+ */
+public class AppRTCAudioManager {
+ private static final String TAG = "AppRTCAudioManager";
+ private static final String SPEAKERPHONE_AUTO = "auto";
+ private static final String SPEAKERPHONE_TRUE = "true";
+ private static final String SPEAKERPHONE_FALSE = "false";
+
+ /**
+   * AudioDevice enumerates the possible audio devices that we currently
+ * support.
+ */
+ public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE }
+
+ /** AudioManager state. */
+ public enum AudioManagerState {
+ UNINITIALIZED,
+ PREINITIALIZED,
+ RUNNING,
+ }
+
+ /** Selected audio device change event. */
+ public interface AudioManagerEvents {
+ // Callback fired once audio device is changed or list of available audio devices changed.
+ void onAudioDeviceChanged(
+ AudioDevice selectedAudioDevice, Set<AudioDevice> availableAudioDevices);
+ }
+
+ private final Context apprtcContext;
+ @Nullable
+ private AudioManager audioManager;
+
+ @Nullable
+ private AudioManagerEvents audioManagerEvents;
+ private AudioManagerState amState;
+ private int savedAudioMode = AudioManager.MODE_INVALID;
+ private boolean savedIsSpeakerPhoneOn;
+ private boolean savedIsMicrophoneMute;
+ private boolean hasWiredHeadset;
+
+ // Default audio device; speaker phone for video calls or earpiece for audio
+ // only calls.
+ private AudioDevice defaultAudioDevice;
+
+ // Contains the currently selected audio device.
+ // This device is changed automatically using a certain scheme where e.g.
+ // a wired headset "wins" over speaker phone. It is also possible for a
+  // user to explicitly select a device (and override any predefined scheme).
+ // See `userSelectedAudioDevice` for details.
+ private AudioDevice selectedAudioDevice;
+
+ // Contains the user-selected audio device which overrides the predefined
+ // selection scheme.
+ // TODO(henrika): always set to AudioDevice.NONE today. Add support for
+ // explicit selection based on choice by userSelectedAudioDevice.
+ private AudioDevice userSelectedAudioDevice;
+
+ // Contains speakerphone setting: auto, true or false
+ @Nullable private final String useSpeakerphone;
+
+ // Proximity sensor object. It measures the proximity of an object in cm
+ // relative to the view screen of a device and can therefore be used to
+ // assist device switching (close to ear <=> use headset earpiece if
+ // available, far from ear <=> use speaker phone).
+ @Nullable private AppRTCProximitySensor proximitySensor;
+
+ // Handles all tasks related to Bluetooth headset devices.
+ private final AppRTCBluetoothManager bluetoothManager;
+
+ // Contains a list of available audio devices. A Set collection is used to
+ // avoid duplicate elements.
+ private Set<AudioDevice> audioDevices = new HashSet<>();
+
+ // Broadcast receiver for wired headset intent broadcasts.
+ private BroadcastReceiver wiredHeadsetReceiver;
+
+ // Callback method for changes in audio focus.
+ @Nullable
+ private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener;
+
+ /**
+ * This method is called when the proximity sensor reports a state change,
+ * e.g. from "NEAR to FAR" or from "FAR to NEAR".
+ */
+ private void onProximitySensorChangedState() {
+ if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) {
+ return;
+ }
+
+ // The proximity sensor should only be activated when there are exactly two
+ // available audio devices.
+ if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
+ && audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
+ if (proximitySensor.sensorReportsNearState()) {
+ // Sensor reports that a "handset is being held up to a person's ear",
+ // or "something is covering the light sensor".
+ setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.EARPIECE);
+ } else {
+ // Sensor reports that a "handset is removed from a person's ear", or
+ // "the light sensor is no longer covered".
+ setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE);
+ }
+ }
+ }
+
+ /* Receiver which handles changes in wired headset availability. */
+ private class WiredHeadsetReceiver extends BroadcastReceiver {
+ private static final int STATE_UNPLUGGED = 0;
+ private static final int STATE_PLUGGED = 1;
+ private static final int HAS_NO_MIC = 0;
+ private static final int HAS_MIC = 1;
+
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ int state = intent.getIntExtra("state", STATE_UNPLUGGED);
+ int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
+ String name = intent.getStringExtra("name");
+ Log.d(TAG, "WiredHeadsetReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+ + "a=" + intent.getAction() + ", s="
+ + (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+ + (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+ + isInitialStickyBroadcast());
+ hasWiredHeadset = (state == STATE_PLUGGED);
+ updateAudioDeviceState();
+ }
+ }
+
+ /** Construction. */
+ static AppRTCAudioManager create(Context context) {
+ return new AppRTCAudioManager(context);
+ }
+
+ private AppRTCAudioManager(Context context) {
+ Log.d(TAG, "ctor");
+ ThreadUtils.checkIsOnMainThread();
+ apprtcContext = context;
+ audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
+ bluetoothManager = AppRTCBluetoothManager.create(context, this);
+ wiredHeadsetReceiver = new WiredHeadsetReceiver();
+ amState = AudioManagerState.UNINITIALIZED;
+
+ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
+ useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
+ context.getString(R.string.pref_speakerphone_default));
+ Log.d(TAG, "useSpeakerphone: " + useSpeakerphone);
+ if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) {
+ defaultAudioDevice = AudioDevice.EARPIECE;
+ } else {
+ defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
+ }
+
+ // Create and initialize the proximity sensor.
+    // Tablet devices (e.g. Nexus 7) do not support proximity sensors.
+ // Note that, the sensor will not be active until start() has been called.
+ proximitySensor = AppRTCProximitySensor.create(context,
+ // This method will be called each time a state change is detected.
+ // Example: user holds their hand over the device (closer than ~5 cm),
+ // or removes their hand from the device.
+ this ::onProximitySensorChangedState);
+
+ Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice);
+ AppRTCUtils.logDeviceInfo(TAG);
+ }
+
+ @SuppressWarnings("deprecation") // TODO(henrika): audioManager.requestAudioFocus() is deprecated.
+ public void start(AudioManagerEvents audioManagerEvents) {
+ Log.d(TAG, "start");
+ ThreadUtils.checkIsOnMainThread();
+ if (amState == AudioManagerState.RUNNING) {
+ Log.e(TAG, "AudioManager is already active");
+ return;
+ }
+ // TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED.
+
+ Log.d(TAG, "AudioManager starts...");
+ this.audioManagerEvents = audioManagerEvents;
+ amState = AudioManagerState.RUNNING;
+
+ // Store current audio state so we can restore it when stop() is called.
+ savedAudioMode = audioManager.getMode();
+ savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
+ savedIsMicrophoneMute = audioManager.isMicrophoneMute();
+ hasWiredHeadset = hasWiredHeadset();
+
+ // Create an AudioManager.OnAudioFocusChangeListener instance.
+ audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
+ // Called on the listener to notify if the audio focus for this listener has been changed.
+ // The `focusChange` value indicates whether the focus was gained, whether the focus was lost,
+ // and whether that loss is transient, or whether the new focus holder will hold it for an
+ // unknown amount of time.
+ // TODO(henrika): possibly extend support of handling audio-focus changes. Only contains
+ // logging for now.
+ @Override
+ public void onAudioFocusChange(int focusChange) {
+ final String typeOfChange;
+ switch (focusChange) {
+ case AudioManager.AUDIOFOCUS_GAIN:
+ typeOfChange = "AUDIOFOCUS_GAIN";
+ break;
+ case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
+ typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT";
+ break;
+ case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
+ typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE";
+ break;
+ case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
+ typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK";
+ break;
+ case AudioManager.AUDIOFOCUS_LOSS:
+ typeOfChange = "AUDIOFOCUS_LOSS";
+ break;
+ case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+ typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT";
+ break;
+ case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
+ typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK";
+ break;
+ default:
+ typeOfChange = "AUDIOFOCUS_INVALID";
+ break;
+ }
+ Log.d(TAG, "onAudioFocusChange: " + typeOfChange);
+ }
+ };
+
+ // Request audio playout focus (without ducking) and install listener for changes in focus.
+ int result = audioManager.requestAudioFocus(audioFocusChangeListener,
+ AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
+ if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+ Log.d(TAG, "Audio focus request granted for VOICE_CALL streams");
+ } else {
+ Log.e(TAG, "Audio focus request failed");
+ }
+
+ // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
+ // required to be in this mode when playout and/or recording starts for
+ // best possible VoIP performance.
+ audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
+
+ // Always disable microphone mute during a WebRTC call.
+ setMicrophoneMute(false);
+
+ // Set initial device states.
+ userSelectedAudioDevice = AudioDevice.NONE;
+ selectedAudioDevice = AudioDevice.NONE;
+ audioDevices.clear();
+
+ // Initialize and start Bluetooth if a BT device is available or initiate
+ // detection of new (enabled) BT devices.
+ bluetoothManager.start();
+
+ // Do initial selection of audio device. This setting can later be changed
+ // either by adding/removing a BT or wired headset or by covering/uncovering
+ // the proximity sensor.
+ updateAudioDeviceState();
+
+ // Register receiver for broadcast intents related to adding/removing a
+ // wired headset.
+ registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
+ Log.d(TAG, "AudioManager started");
+ }
+
+ @SuppressWarnings("deprecation") // TODO(henrika): audioManager.abandonAudioFocus() is deprecated.
+ public void stop() {
+ Log.d(TAG, "stop");
+ ThreadUtils.checkIsOnMainThread();
+ if (amState != AudioManagerState.RUNNING) {
+ Log.e(TAG, "Trying to stop AudioManager in incorrect state: " + amState);
+ return;
+ }
+ amState = AudioManagerState.UNINITIALIZED;
+
+ unregisterReceiver(wiredHeadsetReceiver);
+
+ bluetoothManager.stop();
+
+ // Restore previously stored audio states.
+ setSpeakerphoneOn(savedIsSpeakerPhoneOn);
+ setMicrophoneMute(savedIsMicrophoneMute);
+ audioManager.setMode(savedAudioMode);
+
+ // Abandon audio focus. Gives the previous focus owner, if any, focus.
+ audioManager.abandonAudioFocus(audioFocusChangeListener);
+ audioFocusChangeListener = null;
+ Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams");
+
+ if (proximitySensor != null) {
+ proximitySensor.stop();
+ proximitySensor = null;
+ }
+
+ audioManagerEvents = null;
+ Log.d(TAG, "AudioManager stopped");
+ }
+
+ /** Changes selection of the currently active audio device. */
+ private void setAudioDeviceInternal(AudioDevice device) {
+ Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")");
+ AppRTCUtils.assertIsTrue(audioDevices.contains(device));
+
+ switch (device) {
+ case SPEAKER_PHONE:
+ setSpeakerphoneOn(true);
+ break;
+ case EARPIECE:
+ setSpeakerphoneOn(false);
+ break;
+ case WIRED_HEADSET:
+ setSpeakerphoneOn(false);
+ break;
+ case BLUETOOTH:
+ setSpeakerphoneOn(false);
+ break;
+ default:
+ Log.e(TAG, "Invalid audio device selection");
+ break;
+ }
+ selectedAudioDevice = device;
+ }
+
+ /**
+ * Changes default audio device.
+ * TODO(henrika): add usage of this method in the AppRTCMobile client.
+ */
+ public void setDefaultAudioDevice(AudioDevice defaultDevice) {
+ ThreadUtils.checkIsOnMainThread();
+ switch (defaultDevice) {
+ case SPEAKER_PHONE:
+ defaultAudioDevice = defaultDevice;
+ break;
+ case EARPIECE:
+ if (hasEarpiece()) {
+ defaultAudioDevice = defaultDevice;
+ } else {
+ defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
+ }
+ break;
+ default:
+ Log.e(TAG, "Invalid default audio device selection");
+ break;
+ }
+ Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")");
+ updateAudioDeviceState();
+ }
+
+ /** Changes selection of the currently active audio device. */
+ public void selectAudioDevice(AudioDevice device) {
+ ThreadUtils.checkIsOnMainThread();
+ if (!audioDevices.contains(device)) {
+ Log.e(TAG, "Can not select " + device + " from available " + audioDevices);
+ }
+ userSelectedAudioDevice = device;
+ updateAudioDeviceState();
+ }
+
+ /** Returns current set of available/selectable audio devices. */
+ public Set<AudioDevice> getAudioDevices() {
+ ThreadUtils.checkIsOnMainThread();
+ return Collections.unmodifiableSet(new HashSet<>(audioDevices));
+ }
+
+ /** Returns the currently selected audio device. */
+ public AudioDevice getSelectedAudioDevice() {
+ ThreadUtils.checkIsOnMainThread();
+ return selectedAudioDevice;
+ }
+
+ /** Helper method for receiver registration. */
+ private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
+ apprtcContext.registerReceiver(receiver, filter);
+ }
+
+ /** Helper method for unregistration of an existing receiver. */
+ private void unregisterReceiver(BroadcastReceiver receiver) {
+ apprtcContext.unregisterReceiver(receiver);
+ }
+
+ /** Sets the speaker phone mode. */
+ private void setSpeakerphoneOn(boolean on) {
+ boolean wasOn = audioManager.isSpeakerphoneOn();
+ if (wasOn == on) {
+ return;
+ }
+ audioManager.setSpeakerphoneOn(on);
+ }
+
+ /** Sets the microphone mute state. */
+ private void setMicrophoneMute(boolean on) {
+ boolean wasMuted = audioManager.isMicrophoneMute();
+ if (wasMuted == on) {
+ return;
+ }
+ audioManager.setMicrophoneMute(on);
+ }
+
+ /** Gets the current earpiece state. */
+ private boolean hasEarpiece() {
+ return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
+ }
+
+ /**
+ * Checks whether a wired headset is connected or not.
+ * This is not a valid indication that audio playback is actually over
+ * the wired headset as audio routing depends on other conditions. We
+ * only use it as an early indicator (during initialization) of an attached
+ * wired headset.
+ */
+ @Deprecated
+ private boolean hasWiredHeadset() {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+ return audioManager.isWiredHeadsetOn();
+ } else {
+ final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ for (AudioDeviceInfo device : devices) {
+ final int type = device.getType();
+ if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
+ Log.d(TAG, "hasWiredHeadset: found wired headset");
+ return true;
+ } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
+ Log.d(TAG, "hasWiredHeadset: found USB audio device");
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ /**
+ * Updates list of possible audio devices and make new device selection.
+ * TODO(henrika): add unit test to verify all state transitions.
+ */
+ public void updateAudioDeviceState() {
+ ThreadUtils.checkIsOnMainThread();
+ Log.d(TAG, "--- updateAudioDeviceState: "
+ + "wired headset=" + hasWiredHeadset + ", "
+ + "BT state=" + bluetoothManager.getState());
+ Log.d(TAG, "Device status: "
+ + "available=" + audioDevices + ", "
+ + "selected=" + selectedAudioDevice + ", "
+ + "user selected=" + userSelectedAudioDevice);
+
+ // Check if any Bluetooth headset is connected. The internal BT state will
+ // change accordingly.
+ // TODO(henrika): perhaps wrap required state into BT manager.
+ if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_DISCONNECTING) {
+ bluetoothManager.updateDevice();
+ }
+
+ // Update the set of available audio devices.
+ Set<AudioDevice> newAudioDevices = new HashSet<>();
+
+ if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE) {
+ newAudioDevices.add(AudioDevice.BLUETOOTH);
+ }
+
+ if (hasWiredHeadset) {
+ // If a wired headset is connected, then it is the only possible option.
+ newAudioDevices.add(AudioDevice.WIRED_HEADSET);
+ } else {
+ // No wired headset, hence the audio-device list can contain speaker
+ // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
+ newAudioDevices.add(AudioDevice.SPEAKER_PHONE);
+ if (hasEarpiece()) {
+ newAudioDevices.add(AudioDevice.EARPIECE);
+ }
+ }
+ // Store state which is set to true if the device list has changed.
+ boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices);
+ // Update the existing audio device set.
+ audioDevices = newAudioDevices;
+ // Correct user selected audio devices if needed.
+ if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
+ && userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
+ // If BT is not available, it can't be the user selection.
+ userSelectedAudioDevice = AudioDevice.NONE;
+ }
+ if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) {
+ // If user selected speaker phone, but then plugged wired headset then make
+ // wired headset as user selected device.
+ userSelectedAudioDevice = AudioDevice.WIRED_HEADSET;
+ }
+ if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
+ // If user selected wired headset, but then unplugged wired headset then make
+ // speaker phone as user selected device.
+ userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE;
+ }
+
+ // Need to start Bluetooth if it is available and user either selected it explicitly or
+ // user did not select any output device.
+ boolean needBluetoothAudioStart =
+ bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
+ && (userSelectedAudioDevice == AudioDevice.NONE
+ || userSelectedAudioDevice == AudioDevice.BLUETOOTH);
+
+ // Need to stop Bluetooth audio if user selected different device and
+ // Bluetooth SCO connection is established or in the process.
+ boolean needBluetoothAudioStop =
+ (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING)
+ && (userSelectedAudioDevice != AudioDevice.NONE
+ && userSelectedAudioDevice != AudioDevice.BLUETOOTH);
+
+ if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
+ || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
+ Log.d(TAG, "Need BT audio: start=" + needBluetoothAudioStart + ", "
+ + "stop=" + needBluetoothAudioStop + ", "
+ + "BT state=" + bluetoothManager.getState());
+ }
+
+ // Start or stop Bluetooth SCO connection given states set earlier.
+ if (needBluetoothAudioStop) {
+ bluetoothManager.stopScoAudio();
+ bluetoothManager.updateDevice();
+ }
+
+ if (needBluetoothAudioStart && !needBluetoothAudioStop) {
+ // Attempt to start Bluetooth SCO audio (takes a few second to start).
+ if (!bluetoothManager.startScoAudio()) {
+ // Remove BLUETOOTH from list of available devices since SCO failed.
+ audioDevices.remove(AudioDevice.BLUETOOTH);
+ audioDeviceSetUpdated = true;
+ }
+ }
+
+ // Update selected audio device.
+ final AudioDevice newAudioDevice;
+
+ if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
+ // If a Bluetooth is connected, then it should be used as output audio
+ // device. Note that it is not sufficient that a headset is available;
+ // an active SCO channel must also be up and running.
+ newAudioDevice = AudioDevice.BLUETOOTH;
+ } else if (hasWiredHeadset) {
+ // If a wired headset is connected, but Bluetooth is not, then wired headset is used as
+ // audio device.
+ newAudioDevice = AudioDevice.WIRED_HEADSET;
+ } else {
+ // No wired headset and no Bluetooth, hence the audio-device list can contain speaker
+ // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
+ // `defaultAudioDevice` contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
+ // depending on the user's selection.
+ newAudioDevice = defaultAudioDevice;
+ }
+ // Switch to new device but only if there has been any changes.
+ if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
+ // Do the required device switch.
+ setAudioDeviceInternal(newAudioDevice);
+ Log.d(TAG, "New device status: "
+ + "available=" + audioDevices + ", "
+ + "selected=" + newAudioDevice);
+ if (audioManagerEvents != null) {
+ // Notify a listening client that audio device has been changed.
+ audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices);
+ }
+ }
+ Log.d(TAG, "--- updateAudioDeviceState done");
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java
new file mode 100644
index 0000000000..e9077d8bd6
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java
@@ -0,0 +1,532 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.SuppressLint;
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothHeadset;
+import android.bluetooth.BluetoothProfile;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.media.AudioManager;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Process;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.util.List;
+import java.util.Set;
+import org.appspot.apprtc.util.AppRTCUtils;
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCBluetoothManager manages functions related to Bluetooth devices in the
+ * AppRTC demo.
+ */
+public class AppRTCBluetoothManager {
+ private static final String TAG = "AppRTCBluetoothManager";
+
+ // Timeout interval for starting or stopping audio to a Bluetooth SCO device.
+ private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000;
+ // Maximum number of SCO connection attempts.
+ private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2;
+
+ // Bluetooth connection state.
+ public enum State {
+ // Bluetooth is not available; no adapter or Bluetooth is off.
+ UNINITIALIZED,
+ // Bluetooth error happened when trying to start Bluetooth.
+ ERROR,
+ // Bluetooth proxy object for the Headset profile exists, but no connected headset devices,
+ // SCO is not started or disconnected.
+ HEADSET_UNAVAILABLE,
+ // Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset
+ // present, but SCO is not started or disconnected.
+ HEADSET_AVAILABLE,
+ // Bluetooth audio SCO connection with remote device is closing.
+ SCO_DISCONNECTING,
+ // Bluetooth audio SCO connection with remote device is initiated.
+ SCO_CONNECTING,
+ // Bluetooth audio SCO connection with remote device is established.
+ SCO_CONNECTED
+ }
+
+ private final Context apprtcContext;
+ private final AppRTCAudioManager apprtcAudioManager;
+ @Nullable
+ private final AudioManager audioManager;
+ private final Handler handler;
+
+ int scoConnectionAttempts;
+ private State bluetoothState;
+ private final BluetoothProfile.ServiceListener bluetoothServiceListener;
+ @Nullable
+ private BluetoothAdapter bluetoothAdapter;
+ @Nullable
+ private BluetoothHeadset bluetoothHeadset;
+ @Nullable
+ private BluetoothDevice bluetoothDevice;
+ private final BroadcastReceiver bluetoothHeadsetReceiver;
+
+ // Runs when the Bluetooth timeout expires. We use that timeout after calling
+ // startScoAudio() or stopScoAudio() because we're not guaranteed to get a
+ // callback after those calls.
+ private final Runnable bluetoothTimeoutRunnable = new Runnable() {
+ @Override
+ public void run() {
+ bluetoothTimeout();
+ }
+ };
+
+ /**
+ * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been
+ * connected to or disconnected from the service.
+ */
+ private class BluetoothServiceListener implements BluetoothProfile.ServiceListener {
+ @Override
+ // Called to notify the client when the proxy object has been connected to the service.
+ // Once we have the profile proxy object, we can use it to monitor the state of the
+ // connection and perform other operations that are relevant to the headset profile.
+ public void onServiceConnected(int profile, BluetoothProfile proxy) {
+ if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
+ return;
+ }
+ Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState);
+ // Android only supports one connected Bluetooth Headset at a time.
+ bluetoothHeadset = (BluetoothHeadset) proxy;
+ updateAudioDeviceState();
+ Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState);
+ }
+
+ @Override
+ /** Notifies the client when the proxy object has been disconnected from the service. */
+ public void onServiceDisconnected(int profile) {
+ if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
+ return;
+ }
+ Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState);
+ stopScoAudio();
+ bluetoothHeadset = null;
+ bluetoothDevice = null;
+ bluetoothState = State.HEADSET_UNAVAILABLE;
+ updateAudioDeviceState();
+ Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState);
+ }
+ }
+
+ // Intent broadcast receiver which handles changes in Bluetooth device availability.
+ // Detects headset changes and Bluetooth SCO state changes.
+ private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ if (bluetoothState == State.UNINITIALIZED) {
+ return;
+ }
+ final String action = intent.getAction();
+ // Change in connection state of the Headset profile. Note that the
+ // change does not tell us anything about whether we're streaming
+ // audio to BT over SCO. Typically received when user turns on a BT
+ // headset while audio is active using another audio device.
+ if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
+ final int state =
+ intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED);
+ Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
+ + "a=ACTION_CONNECTION_STATE_CHANGED, "
+ + "s=" + stateToString(state) + ", "
+ + "sb=" + isInitialStickyBroadcast() + ", "
+ + "BT state: " + bluetoothState);
+ if (state == BluetoothHeadset.STATE_CONNECTED) {
+ scoConnectionAttempts = 0;
+ updateAudioDeviceState();
+ } else if (state == BluetoothHeadset.STATE_CONNECTING) {
+ // No action needed.
+ } else if (state == BluetoothHeadset.STATE_DISCONNECTING) {
+ // No action needed.
+ } else if (state == BluetoothHeadset.STATE_DISCONNECTED) {
+ // Bluetooth is probably powered off during the call.
+ stopScoAudio();
+ updateAudioDeviceState();
+ }
+ // Change in the audio (SCO) connection state of the Headset profile.
+ // Typically received after call to startScoAudio() has finalized.
+ } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
+ final int state = intent.getIntExtra(
+ BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
+ Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
+ + "a=ACTION_AUDIO_STATE_CHANGED, "
+ + "s=" + stateToString(state) + ", "
+ + "sb=" + isInitialStickyBroadcast() + ", "
+ + "BT state: " + bluetoothState);
+ if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
+ cancelTimer();
+ if (bluetoothState == State.SCO_CONNECTING) {
+ Log.d(TAG, "+++ Bluetooth audio SCO is now connected");
+ bluetoothState = State.SCO_CONNECTED;
+ scoConnectionAttempts = 0;
+ updateAudioDeviceState();
+ } else {
+ Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED");
+ }
+ } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) {
+ Log.d(TAG, "+++ Bluetooth audio SCO is now connecting...");
+ } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
+ Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected");
+ if (isInitialStickyBroadcast()) {
+ Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast.");
+ return;
+ }
+ updateAudioDeviceState();
+ }
+ }
+ Log.d(TAG, "onReceive done: BT state=" + bluetoothState);
+ }
+ }
+
+ /** Construction. */
+ static AppRTCBluetoothManager create(Context context, AppRTCAudioManager audioManager) {
+ Log.d(TAG, "create" + AppRTCUtils.getThreadInfo());
+ return new AppRTCBluetoothManager(context, audioManager);
+ }
+
+ protected AppRTCBluetoothManager(Context context, AppRTCAudioManager audioManager) {
+ Log.d(TAG, "ctor");
+ ThreadUtils.checkIsOnMainThread();
+ apprtcContext = context;
+ apprtcAudioManager = audioManager;
+ this.audioManager = getAudioManager(context);
+ bluetoothState = State.UNINITIALIZED;
+ bluetoothServiceListener = new BluetoothServiceListener();
+ bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver();
+ handler = new Handler(Looper.getMainLooper());
+ }
+
+ /** Returns the internal state. */
+ public State getState() {
+ ThreadUtils.checkIsOnMainThread();
+ return bluetoothState;
+ }
+
+ /**
+ * Activates components required to detect Bluetooth devices and to enable
+ * BT SCO (audio is routed via BT SCO) for the headset profile. The end
+ * state will be HEADSET_UNAVAILABLE but a state machine has started which
+ * will start a state change sequence where the final outcome depends on
+ * if/when the BT headset is enabled.
+ * Example of state change sequence when start() is called while BT device
+ * is connected and enabled:
+ * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE -->
+ * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO.
+ * Note that the AppRTCAudioManager is also involved in driving this state
+ * change.
+ */
+ public void start() {
+ ThreadUtils.checkIsOnMainThread();
+ Log.d(TAG, "start");
+ if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) {
+ Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission");
+ return;
+ }
+ if (bluetoothState != State.UNINITIALIZED) {
+ Log.w(TAG, "Invalid BT state");
+ return;
+ }
+ bluetoothHeadset = null;
+ bluetoothDevice = null;
+ scoConnectionAttempts = 0;
+ // Get a handle to the default local Bluetooth adapter.
+ bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
+ if (bluetoothAdapter == null) {
+ Log.w(TAG, "Device does not support Bluetooth");
+ return;
+ }
+ // Ensure that the device supports use of BT SCO audio for off call use cases.
+ if (!audioManager.isBluetoothScoAvailableOffCall()) {
+ Log.e(TAG, "Bluetooth SCO audio is not available off call");
+ return;
+ }
+ logBluetoothAdapterInfo(bluetoothAdapter);
+ // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and
+ // Hands-Free) proxy object and install a listener.
+ if (!getBluetoothProfileProxy(
+ apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) {
+ Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed");
+ return;
+ }
+ // Register receivers for BluetoothHeadset change notifications.
+ IntentFilter bluetoothHeadsetFilter = new IntentFilter();
+ // Register receiver for change in connection state of the Headset profile.
+ bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
+ // Register receiver for change in audio connection state of the Headset profile.
+ bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
+ registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter);
+ Log.d(TAG, "HEADSET profile state: "
+ + stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET)));
+ Log.d(TAG, "Bluetooth proxy for headset profile has started");
+ bluetoothState = State.HEADSET_UNAVAILABLE;
+ Log.d(TAG, "start done: BT state=" + bluetoothState);
+ }
+
  /**
   * Stops and closes all components related to Bluetooth audio: tears down any
   * SCO link, unregisters the headset receiver, cancels the timeout timer and
   * releases the profile proxy. Resets the state machine to UNINITIALIZED.
   * Safe to call repeatedly. Must be called on the main thread.
   */
  public void stop() {
    ThreadUtils.checkIsOnMainThread();
    Log.d(TAG, "stop: BT state=" + bluetoothState);
    if (bluetoothAdapter == null) {
      // start() never obtained an adapter, or stop() already completed.
      return;
    }
    // Stop BT SCO connection with remote device if needed.
    stopScoAudio();
    // Close down remaining BT resources.
    if (bluetoothState == State.UNINITIALIZED) {
      return;
    }
    unregisterReceiver(bluetoothHeadsetReceiver);
    cancelTimer();
    if (bluetoothHeadset != null) {
      bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset);
      bluetoothHeadset = null;
    }
    bluetoothAdapter = null;
    bluetoothDevice = null;
    bluetoothState = State.UNINITIALIZED;
    Log.d(TAG, "stop done: BT state=" + bluetoothState);
  }
+
  /**
   * Starts Bluetooth SCO connection with remote device.
   * Note that the phone application always has the priority on the usage of the SCO connection
   * for telephony. If this method is called while the phone is in call it will be ignored.
   * Similarly, if a call is received or sent while an application is using the SCO connection,
   * the connection will be lost for the application and NOT returned automatically when the call
   * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a
   * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO
   * audio connection is established.
   * TODO(henrika): should we add support for virtual voice call to BT headset also for JBMR2 and
   * higher. It might be required to initiates a virtual voice call since many devices do not
   * accept SCO audio without a "call".
   *
   * @return true when a SCO connection attempt was started (state moves to
   *     SCO_CONNECTING); false when attempts are exhausted or no headset is
   *     available. Must be called on the main thread.
   */
  public boolean startScoAudio() {
    ThreadUtils.checkIsOnMainThread();
    Log.d(TAG, "startSco: BT state=" + bluetoothState + ", "
        + "attempts: " + scoConnectionAttempts + ", "
        + "SCO is on: " + isScoOn());
    // Give up after MAX_SCO_CONNECTION_ATTEMPTS failed tries (the counter is
    // reset when a SCO link actually comes up).
    if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) {
      Log.e(TAG, "BT SCO connection fails - no more attempts");
      return false;
    }
    if (bluetoothState != State.HEADSET_AVAILABLE) {
      Log.e(TAG, "BT SCO connection fails - no headset available");
      return false;
    }
    // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED.
    Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED...");
    // The SCO connection establishment can take several seconds, hence we cannot rely on the
    // connection to be available when the method returns but instead register to receive the
    // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED.
    bluetoothState = State.SCO_CONNECTING;
    audioManager.startBluetoothSco();
    audioManager.setBluetoothScoOn(true);
    scoConnectionAttempts++;
    // Arm the timeout in case the SCO link never comes up (see bluetoothTimeout()).
    startTimer();
    Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", "
        + "SCO is on: " + isScoOn());
    return true;
  }
+
  /**
   * Stops Bluetooth SCO connection with remote device. No-op unless a SCO
   * connection is in progress or established. Must be called on the main thread.
   */
  public void stopScoAudio() {
    ThreadUtils.checkIsOnMainThread();
    Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", "
        + "SCO is on: " + isScoOn());
    if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
      return;
    }
    // A pending timeout is obsolete once we tear the link down ourselves.
    cancelTimer();
    audioManager.stopBluetoothSco();
    audioManager.setBluetoothScoOn(false);
    bluetoothState = State.SCO_DISCONNECTING;
    Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", "
        + "SCO is on: " + isScoOn());
  }
+
+ /**
+ * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
+ * Service via IPC) to update the list of connected devices for the HEADSET
+ * profile. The internal state will change to HEADSET_UNAVAILABLE or to
+ * HEADSET_AVAILABLE and `bluetoothDevice` will be mapped to the connected
+ * device if available.
+ */
+ public void updateDevice() {
+ if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
+ return;
+ }
+ Log.d(TAG, "updateDevice");
+ // Get connected devices for the headset profile. Returns the set of
+ // devices which are in state STATE_CONNECTED. The BluetoothDevice class
+ // is just a thin wrapper for a Bluetooth hardware address.
+ List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
+ if (devices.isEmpty()) {
+ bluetoothDevice = null;
+ bluetoothState = State.HEADSET_UNAVAILABLE;
+ Log.d(TAG, "No connected bluetooth headset");
+ } else {
+ // Always use first device in list. Android only supports one device.
+ bluetoothDevice = devices.get(0);
+ bluetoothState = State.HEADSET_AVAILABLE;
+ Log.d(TAG, "Connected bluetooth headset: "
+ + "name=" + bluetoothDevice.getName() + ", "
+ + "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice))
+ + ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice));
+ }
+ Log.d(TAG, "updateDevice done: BT state=" + bluetoothState);
+ }
+
  /**
   * Stubs for test mocks.
   */
  /** Returns the system AudioManager service; overridable so tests can inject a mock. */
  @Nullable
  protected AudioManager getAudioManager(Context context) {
    return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
  }
+
  /** Registers `receiver` on the app context; overridable for tests. */
  protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
    apprtcContext.registerReceiver(receiver, filter);
  }
+
  /** Unregisters `receiver` from the app context; overridable for tests. */
  protected void unregisterReceiver(BroadcastReceiver receiver) {
    apprtcContext.unregisterReceiver(receiver);
  }
+
  /**
   * Requests a profile proxy from the adapter; overridable for tests.
   * Note: delegates to the `bluetoothAdapter` field, so it is only valid
   * after start() has obtained the adapter.
   */
  protected boolean getBluetoothProfileProxy(
      Context context, BluetoothProfile.ServiceListener listener, int profile) {
    return bluetoothAdapter.getProfileProxy(context, listener, profile);
  }
+
+ protected boolean hasPermission(Context context, String permission) {
+ return apprtcContext.checkPermission(permission, Process.myPid(), Process.myUid())
+ == PackageManager.PERMISSION_GRANTED;
+ }
+
+ /** Logs the state of the local Bluetooth adapter. */
+ @SuppressLint("HardwareIds")
+ protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
+ Log.d(TAG, "BluetoothAdapter: "
+ + "enabled=" + localAdapter.isEnabled() + ", "
+ + "state=" + stateToString(localAdapter.getState()) + ", "
+ + "name=" + localAdapter.getName() + ", "
+ + "address=" + localAdapter.getAddress());
+ // Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter.
+ Set<BluetoothDevice> pairedDevices = localAdapter.getBondedDevices();
+ if (!pairedDevices.isEmpty()) {
+ Log.d(TAG, "paired devices:");
+ for (BluetoothDevice device : pairedDevices) {
+ Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress());
+ }
+ }
+ }
+
  /**
   * Ensures that the audio manager updates its list of available audio devices
   * by delegating to AppRTCAudioManager. Must be called on the main thread.
   */
  private void updateAudioDeviceState() {
    ThreadUtils.checkIsOnMainThread();
    Log.d(TAG, "updateAudioDeviceState");
    apprtcAudioManager.updateAudioDeviceState();
  }
+
  /**
   * Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds,
   * invoking bluetoothTimeout() on the main looper. Must be called on the main thread.
   */
  private void startTimer() {
    ThreadUtils.checkIsOnMainThread();
    Log.d(TAG, "startTimer");
    handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
  }
+
  /** Cancels any outstanding timer tasks posted by startTimer(). Main thread only. */
  private void cancelTimer() {
    ThreadUtils.checkIsOnMainThread();
    Log.d(TAG, "cancelTimer");
    handler.removeCallbacks(bluetoothTimeoutRunnable);
  }
+
+ /**
+ * Called when start of the BT SCO channel takes too long time. Usually
+ * happens when the BT device has been turned on during an ongoing call.
+ */
+ private void bluetoothTimeout() {
+ ThreadUtils.checkIsOnMainThread();
+ if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
+ return;
+ }
+ Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", "
+ + "attempts: " + scoConnectionAttempts + ", "
+ + "SCO is on: " + isScoOn());
+ if (bluetoothState != State.SCO_CONNECTING) {
+ return;
+ }
+ // Bluetooth SCO should be connecting; check the latest result.
+ boolean scoConnected = false;
+ List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
+ if (devices.size() > 0) {
+ bluetoothDevice = devices.get(0);
+ if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) {
+ Log.d(TAG, "SCO connected with " + bluetoothDevice.getName());
+ scoConnected = true;
+ } else {
+ Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName());
+ }
+ }
+ if (scoConnected) {
+ // We thought BT had timed out, but it's actually on; updating state.
+ bluetoothState = State.SCO_CONNECTED;
+ scoConnectionAttempts = 0;
+ } else {
+ // Give up and "cancel" our request by calling stopBluetoothSco().
+ Log.w(TAG, "BT failed to connect after timeout");
+ stopScoAudio();
+ }
+ updateAudioDeviceState();
+ Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState);
+ }
+
  /** Checks whether audio is currently routed over Bluetooth SCO. */
  private boolean isScoOn() {
    return audioManager.isBluetoothScoOn();
  }
+
+ /** Converts BluetoothAdapter states into local string representations. */
+ private String stateToString(int state) {
+ switch (state) {
+ case BluetoothAdapter.STATE_DISCONNECTED:
+ return "DISCONNECTED";
+ case BluetoothAdapter.STATE_CONNECTED:
+ return "CONNECTED";
+ case BluetoothAdapter.STATE_CONNECTING:
+ return "CONNECTING";
+ case BluetoothAdapter.STATE_DISCONNECTING:
+ return "DISCONNECTING";
+ case BluetoothAdapter.STATE_OFF:
+ return "OFF";
+ case BluetoothAdapter.STATE_ON:
+ return "ON";
+ case BluetoothAdapter.STATE_TURNING_OFF:
+ // Indicates the local Bluetooth adapter is turning off. Local clients should immediately
+ // attempt graceful disconnection of any remote links.
+ return "TURNING_OFF";
+ case BluetoothAdapter.STATE_TURNING_ON:
+ // Indicates the local Bluetooth adapter is turning on. However local clients should wait
+ // for STATE_ON before attempting to use the adapter.
+ return "TURNING_ON";
+ default:
+ return "INVALID";
+ }
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
new file mode 100644
index 0000000000..d5b7b4338e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.SessionDescription;
+
+import java.util.List;
+
/**
 * AppRTCClient is the interface representing an AppRTC client.
 *
 * <p>Implementations drive the signaling exchange with an AppRTC room server:
 * joining a room, sending SDP offers/answers and ICE candidates, and leaving
 * the room. Incoming signaling messages are delivered via {@link SignalingEvents}.
 */
public interface AppRTCClient {
  /**
   * Struct holding the connection parameters of an AppRTC room.
   */
  class RoomConnectionParameters {
    // Base URL of the room server.
    public final String roomUrl;
    // Identifier of the room to join.
    public final String roomId;
    // True for a loopback call (client connects back to itself).
    public final boolean loopback;
    // Extra URL query parameters; may be null.
    public final String urlParameters;
    public RoomConnectionParameters(
        String roomUrl, String roomId, boolean loopback, String urlParameters) {
      this.roomUrl = roomUrl;
      this.roomId = roomId;
      this.loopback = loopback;
      this.urlParameters = urlParameters;
    }
    /** Convenience constructor with no extra URL parameters (urlParameters == null). */
    public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
      this(roomUrl, roomId, loopback, null /* urlParameters */);
    }
  }

  /**
   * Asynchronously connect to an AppRTC room URL using supplied connection
   * parameters. Once connection is established onConnectedToRoom()
   * callback with room parameters is invoked.
   */
  void connectToRoom(RoomConnectionParameters connectionParameters);

  /**
   * Send offer SDP to the other participant.
   */
  void sendOfferSdp(final SessionDescription sdp);

  /**
   * Send answer SDP to the other participant.
   */
  void sendAnswerSdp(final SessionDescription sdp);

  /**
   * Send Ice candidate to the other participant.
   */
  void sendLocalIceCandidate(final IceCandidate candidate);

  /**
   * Send removed ICE candidates to the other participant.
   */
  void sendLocalIceCandidateRemovals(final IceCandidate[] candidates);

  /**
   * Disconnect from room.
   */
  void disconnectFromRoom();

  /**
   * Struct holding the signaling parameters of an AppRTC room, as extracted
   * from the room server's join response.
   */
  class SignalingParameters {
    // ICE servers (STUN/TURN) to hand to the PeerConnection.
    public final List<PeerConnection.IceServer> iceServers;
    // True when this client is the call initiator (creates the offer).
    public final boolean initiator;
    // Server-assigned identifier of this client within the room.
    public final String clientId;
    // WebSocket URL for the signaling channel.
    public final String wssUrl;
    // HTTP POST URL companion of the WebSocket channel.
    public final String wssPostUrl;
    // Remote offer already present in the room, if any.
    public final SessionDescription offerSdp;
    // Remote ICE candidates already present in the room, if any.
    public final List<IceCandidate> iceCandidates;

    public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
        String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
        List<IceCandidate> iceCandidates) {
      this.iceServers = iceServers;
      this.initiator = initiator;
      this.clientId = clientId;
      this.wssUrl = wssUrl;
      this.wssPostUrl = wssPostUrl;
      this.offerSdp = offerSdp;
      this.iceCandidates = iceCandidates;
    }
  }

  /**
   * Callback interface for messages delivered on signaling channel.
   *
   * <p>Methods are guaranteed to be invoked on the UI thread of `activity`.
   */
  interface SignalingEvents {
    /**
     * Callback fired once the room's signaling parameters
     * SignalingParameters are extracted.
     */
    void onConnectedToRoom(final SignalingParameters params);

    /**
     * Callback fired once remote SDP is received.
     */
    void onRemoteDescription(final SessionDescription sdp);

    /**
     * Callback fired once remote Ice candidate is received.
     */
    void onRemoteIceCandidate(final IceCandidate candidate);

    /**
     * Callback fired once remote Ice candidate removals are received.
     */
    void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates);

    /**
     * Callback fired once channel is closed.
     */
    void onChannelClose();

    /**
     * Callback fired once channel error happened.
     */
    void onChannelError(final String description);
  }
}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
new file mode 100644
index 0000000000..604e2863d9
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.Context;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.os.Build;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import org.appspot.apprtc.util.AppRTCUtils;
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCProximitySensor manages functions related to the proximity sensor in
+ * the AppRTC demo.
+ * On most device, the proximity sensor is implemented as a boolean-sensor.
+ * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX
+ * value i.e. the LUX value of the light sensor is compared with a threshold.
+ * A LUX-value more than the threshold means the proximity sensor returns "FAR".
+ * Anything less than the threshold value and the sensor returns "NEAR".
+ */
+public class AppRTCProximitySensor implements SensorEventListener {
+ private static final String TAG = "AppRTCProximitySensor";
+
+ // This class should be created, started and stopped on one thread
+ // (e.g. the main thread). We use `nonThreadSafe` to ensure that this is
+ // the case. Only active when `DEBUG` is set to true.
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ private final Runnable onSensorStateListener;
+ private final SensorManager sensorManager;
+ @Nullable private Sensor proximitySensor;
+ private boolean lastStateReportIsNear;
+
+ /** Construction */
+ static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
+ return new AppRTCProximitySensor(context, sensorStateListener);
+ }
+
+ private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
+ Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
+ onSensorStateListener = sensorStateListener;
+ sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
+ }
+
+ /**
+ * Activate the proximity sensor. Also do initialization if called for the
+ * first time.
+ */
+ public boolean start() {
+ threadChecker.checkIsOnValidThread();
+ Log.d(TAG, "start" + AppRTCUtils.getThreadInfo());
+ if (!initDefaultSensor()) {
+ // Proximity sensor is not supported on this device.
+ return false;
+ }
+ sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
+ return true;
+ }
+
+ /** Deactivate the proximity sensor. */
+ public void stop() {
+ threadChecker.checkIsOnValidThread();
+ Log.d(TAG, "stop" + AppRTCUtils.getThreadInfo());
+ if (proximitySensor == null) {
+ return;
+ }
+ sensorManager.unregisterListener(this, proximitySensor);
+ }
+
+ /** Getter for last reported state. Set to true if "near" is reported. */
+ public boolean sensorReportsNearState() {
+ threadChecker.checkIsOnValidThread();
+ return lastStateReportIsNear;
+ }
+
+ @Override
+ public final void onAccuracyChanged(Sensor sensor, int accuracy) {
+ threadChecker.checkIsOnValidThread();
+ AppRTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY);
+ if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
+ Log.e(TAG, "The values returned by this sensor cannot be trusted");
+ }
+ }
+
+ @Override
+ public final void onSensorChanged(SensorEvent event) {
+ threadChecker.checkIsOnValidThread();
+ AppRTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY);
+ // As a best practice; do as little as possible within this method and
+ // avoid blocking.
+ float distanceInCentimeters = event.values[0];
+ if (distanceInCentimeters < proximitySensor.getMaximumRange()) {
+ Log.d(TAG, "Proximity sensor => NEAR state");
+ lastStateReportIsNear = true;
+ } else {
+ Log.d(TAG, "Proximity sensor => FAR state");
+ lastStateReportIsNear = false;
+ }
+
+ // Report about new state to listening client. Client can then call
+ // sensorReportsNearState() to query the current state (NEAR or FAR).
+ if (onSensorStateListener != null) {
+ onSensorStateListener.run();
+ }
+
+ Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
+ + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+ + event.values[0]);
+ }
+
+ /**
+ * Get default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
+ * does not support this type of sensor and false will be returned in such
+ * cases.
+ */
+ private boolean initDefaultSensor() {
+ if (proximitySensor != null) {
+ return true;
+ }
+ proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
+ if (proximitySensor == null) {
+ return false;
+ }
+ logProximitySensorInfo();
+ return true;
+ }
+
+ /** Helper method for logging information about the proximity sensor. */
+ private void logProximitySensorInfo() {
+ if (proximitySensor == null) {
+ return;
+ }
+ StringBuilder info = new StringBuilder("Proximity sensor: ");
+ info.append("name=").append(proximitySensor.getName());
+ info.append(", vendor: ").append(proximitySensor.getVendor());
+ info.append(", power: ").append(proximitySensor.getPower());
+ info.append(", resolution: ").append(proximitySensor.getResolution());
+ info.append(", max range: ").append(proximitySensor.getMaximumRange());
+ info.append(", min delay: ").append(proximitySensor.getMinDelay());
+ info.append(", type: ").append(proximitySensor.getStringType());
+ info.append(", max delay: ").append(proximitySensor.getMaxDelay());
+ info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
+ info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
+ Log.d(TAG, info.toString());
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
new file mode 100644
index 0000000000..eb5ee8289e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -0,0 +1,962 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.app.FragmentTransaction;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+import android.view.WindowManager.LayoutParams;
+import android.widget.Toast;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.lang.RuntimeException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import org.appspot.apprtc.AppRTCAudioManager.AudioDevice;
+import org.appspot.apprtc.AppRTCAudioManager.AudioManagerEvents;
+import org.appspot.apprtc.AppRTCClient.RoomConnectionParameters;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.PeerConnectionClient.DataChannelParameters;
+import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
+import org.webrtc.EglBase;
+import org.webrtc.FileVideoCapturer;
+import org.webrtc.IceCandidate;
+import org.webrtc.Logging;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RTCStatsReport;
+import org.webrtc.RendererCommon.ScalingType;
+import org.webrtc.ScreenCapturerAndroid;
+import org.webrtc.SessionDescription;
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoFileRenderer;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoSink;
+
+/**
+ * Activity for peer connection call setup, call waiting
+ * and call view.
+ */
+public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
+ PeerConnectionClient.PeerConnectionEvents,
+ CallFragment.OnCallEvents {
+ private static final String TAG = "CallRTCClient";
+
+ public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
+ public static final String EXTRA_URLPARAMETERS = "org.appspot.apprtc.URLPARAMETERS";
+ public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
+ public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
+ public static final String EXTRA_SCREENCAPTURE = "org.appspot.apprtc.SCREENCAPTURE";
+ public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
+ public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
+ public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
+ public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
+ public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
+ "org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
+ public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
+ public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
+ public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
+ public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
+ public static final String EXTRA_FLEXFEC_ENABLED = "org.appspot.apprtc.FLEXFEC";
+ public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
+ public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
+ public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
+ "org.appspot.apprtc.NOAUDIOPROCESSING";
+ public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
+ public static final String EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED =
+ "org.appspot.apprtc.SAVE_INPUT_AUDIO_TO_FILE";
+ public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
+ public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
+ public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
+ public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
+ public static final String EXTRA_DISABLE_WEBRTC_AGC_AND_HPF =
+ "org.appspot.apprtc.DISABLE_WEBRTC_GAIN_CONTROL";
+ public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
+ public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
+ public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
+ public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
+ public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
+ public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
+ "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
+ public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
+ "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
+ public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
+ "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
+ public static final String EXTRA_USE_VALUES_FROM_INTENT =
+ "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
+ public static final String EXTRA_DATA_CHANNEL_ENABLED = "org.appspot.apprtc.DATA_CHANNEL_ENABLED";
+ public static final String EXTRA_ORDERED = "org.appspot.apprtc.ORDERED";
+ public static final String EXTRA_MAX_RETRANSMITS_MS = "org.appspot.apprtc.MAX_RETRANSMITS_MS";
+ public static final String EXTRA_MAX_RETRANSMITS = "org.appspot.apprtc.MAX_RETRANSMITS";
+ public static final String EXTRA_PROTOCOL = "org.appspot.apprtc.PROTOCOL";
+ public static final String EXTRA_NEGOTIATED = "org.appspot.apprtc.NEGOTIATED";
+ public static final String EXTRA_ID = "org.appspot.apprtc.ID";
+ public static final String EXTRA_ENABLE_RTCEVENTLOG = "org.appspot.apprtc.ENABLE_RTCEVENTLOG";
+
+ private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
+
+ // List of mandatory application permissions.
+ private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
+ "android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
+
+ // Peer connection statistics callback period in ms.
+ private static final int STAT_CALLBACK_PERIOD = 1000;
+
+ private static class ProxyVideoSink implements VideoSink {
+ private VideoSink target;
+
+ @Override
+ synchronized public void onFrame(VideoFrame frame) {
+ if (target == null) {
+ Logging.d(TAG, "Dropping frame in proxy because target is null.");
+ return;
+ }
+
+ target.onFrame(frame);
+ }
+
+ synchronized public void setTarget(VideoSink target) {
+ this.target = target;
+ }
+ }
+
+ private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
+ private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
+ @Nullable private PeerConnectionClient peerConnectionClient;
+ @Nullable
+ private AppRTCClient appRtcClient;
+ @Nullable
+ private SignalingParameters signalingParameters;
+ @Nullable private AppRTCAudioManager audioManager;
+ @Nullable
+ private SurfaceViewRenderer pipRenderer;
+ @Nullable
+ private SurfaceViewRenderer fullscreenRenderer;
+ @Nullable
+ private VideoFileRenderer videoFileRenderer;
+ private final List<VideoSink> remoteSinks = new ArrayList<>();
+ private Toast logToast;
+ private boolean commandLineRun;
+ private boolean activityRunning;
+ private RoomConnectionParameters roomConnectionParameters;
+ @Nullable
+ private PeerConnectionParameters peerConnectionParameters;
+ private boolean connected;
+ private boolean isError;
+ private boolean callControlFragmentVisible = true;
+ private long callStartedTimeMs;
+ private boolean micEnabled = true;
+ private boolean screencaptureEnabled;
+ private static Intent mediaProjectionPermissionResultData;
+ private static int mediaProjectionPermissionResultCode;
+ // True if local view is in the fullscreen renderer.
+ private boolean isSwappedFeeds;
+
+ // Controls
+ private CallFragment callFragment;
+ private HudFragment hudFragment;
+ private CpuMonitor cpuMonitor;
+
/**
 * Builds the whole call UI and call machinery from the launching Intent:
 * renderers, fragments, connection parameters, the signaling client and the
 * peer connection factory. Ends by either requesting screen-capture
 * permission or starting the call directly.
 */
@Override
// TODO(bugs.webrtc.org/8580): LayoutParams.FLAG_TURN_SCREEN_ON and
// LayoutParams.FLAG_SHOW_WHEN_LOCKED are deprecated.
@SuppressWarnings("deprecation")
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));

  // Set window styles for fullscreen-window size. Needs to be done before
  // adding content.
  requestWindowFeature(Window.FEATURE_NO_TITLE);
  getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
      | LayoutParams.FLAG_SHOW_WHEN_LOCKED | LayoutParams.FLAG_TURN_SCREEN_ON);
  getWindow().getDecorView().setSystemUiVisibility(getSystemUiVisibility());
  setContentView(R.layout.activity_call);

  connected = false;
  signalingParameters = null;

  // Create UI controls.
  pipRenderer = findViewById(R.id.pip_video_view);
  fullscreenRenderer = findViewById(R.id.fullscreen_video_view);
  callFragment = new CallFragment();
  hudFragment = new HudFragment();

  // Show/hide call control fragment on view click.
  View.OnClickListener listener = new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      toggleCallControlFragmentVisibility();
    }
  };

  // Swap feeds on pip view click.
  pipRenderer.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      setSwappedFeeds(!isSwappedFeeds);
    }
  });

  fullscreenRenderer.setOnClickListener(listener);
  remoteSinks.add(remoteProxyRenderer);

  final Intent intent = getIntent();
  final EglBase eglBase = EglBase.create();

  // Create video renderers.
  pipRenderer.init(eglBase.getEglBaseContext(), null);
  pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
  String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);

  // When saveRemoteVideoToFile is set we save the video from the remote to a file.
  if (saveRemoteVideoToFile != null) {
    int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
    int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
    try {
      videoFileRenderer = new VideoFileRenderer(
          saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
      remoteSinks.add(videoFileRenderer);
    } catch (IOException e) {
      throw new RuntimeException(
          "Failed to open video file for output: " + saveRemoteVideoToFile, e);
    }
  }
  fullscreenRenderer.init(eglBase.getEglBaseContext(), null);
  fullscreenRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL);

  pipRenderer.setZOrderMediaOverlay(true);
  pipRenderer.setEnableHardwareScaler(true /* enabled */);
  fullscreenRenderer.setEnableHardwareScaler(false /* enabled */);
  // Start with local feed in fullscreen and swap it to the pip when the call is connected.
  setSwappedFeeds(true /* isSwappedFeeds */);

  // Check for mandatory permissions; bail out of the activity if any is missing.
  for (String permission : MANDATORY_PERMISSIONS) {
    if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
      logAndToast("Permission " + permission + " is not granted");
      setResult(RESULT_CANCELED);
      finish();
      return;
    }
  }

  Uri roomUri = intent.getData();
  if (roomUri == null) {
    logAndToast(getString(R.string.missing_url));
    Log.e(TAG, "Didn't get any URL in intent!");
    setResult(RESULT_CANCELED);
    finish();
    return;
  }

  // Get Intent parameters.
  String roomId = intent.getStringExtra(EXTRA_ROOMID);
  Log.d(TAG, "Room ID: " + roomId);
  if (roomId == null || roomId.length() == 0) {
    logAndToast(getString(R.string.missing_url));
    Log.e(TAG, "Incorrect room ID in intent!");
    setResult(RESULT_CANCELED);
    finish();
    return;
  }

  boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
  boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);

  int videoWidth = intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0);
  int videoHeight = intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0);

  screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
  // If capturing format is not specified for screencapture, use screen resolution.
  if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
    DisplayMetrics displayMetrics = getDisplayMetrics();
    videoWidth = displayMetrics.widthPixels;
    videoHeight = displayMetrics.heightPixels;
  }
  DataChannelParameters dataChannelParameters = null;
  if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, false)) {
    dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
        intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
        intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
        intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
  }
  peerConnectionParameters =
      new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
          tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
          intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
          intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
          intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
          intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
          intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
          intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
          intent.getBooleanExtra(EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, false),
          intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
          intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
          intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
          intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
          intent.getBooleanExtra(EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false),
          intent.getBooleanExtra(EXTRA_ENABLE_RTCEVENTLOG, false), dataChannelParameters);
  commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
  int runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);

  Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");

  // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
  // standard WebSocketRTCClient.
  if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
    appRtcClient = new WebSocketRTCClient(this);
  } else {
    Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
    appRtcClient = new DirectRTCClient(this);
  }
  // Create connection parameters.
  String urlParameters = intent.getStringExtra(EXTRA_URLPARAMETERS);
  roomConnectionParameters =
      new RoomConnectionParameters(roomUri.toString(), roomId, loopback, urlParameters);

  // Create CPU monitor
  if (CpuMonitor.isSupported()) {
    cpuMonitor = new CpuMonitor(this);
    hudFragment.setCpuMonitor(cpuMonitor);
  }

  // Send intent arguments to fragments.
  callFragment.setArguments(intent.getExtras());
  hudFragment.setArguments(intent.getExtras());
  // Activate call and HUD fragments and start the call.
  FragmentTransaction ft = getFragmentManager().beginTransaction();
  ft.add(R.id.call_fragment_container, callFragment);
  ft.add(R.id.hud_fragment_container, hudFragment);
  ft.commit();

  // For command line execution run connection for <runTimeMs> and exit.
  if (commandLineRun && runTimeMs > 0) {
    (new Handler()).postDelayed(new Runnable() {
      @Override
      public void run() {
        disconnect();
      }
    }, runTimeMs);
  }

  // Create peer connection client.
  peerConnectionClient = new PeerConnectionClient(
      getApplicationContext(), eglBase, peerConnectionParameters, CallActivity.this);
  PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
  if (loopback) {
    // In loopback mode, don't ignore any network adapter types.
    options.networkIgnoreMask = 0;
  }
  peerConnectionClient.createPeerConnectionFactory(options);

  // Screen capture needs a user-permission round-trip first; startCall() then
  // happens in onActivityResult().
  if (screencaptureEnabled) {
    startScreenCapture();
  } else {
    startCall();
  }
}
+
+ private DisplayMetrics getDisplayMetrics() {
+ DisplayMetrics displayMetrics = new DisplayMetrics();
+ WindowManager windowManager =
+ (WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
+ windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
+ return displayMetrics;
+ }
+
+ private static int getSystemUiVisibility() {
+ return View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
+ }
+
// Launches the system screen-capture consent dialog; the result arrives in
// onActivityResult() with CAPTURE_PERMISSION_REQUEST_CODE.
private void startScreenCapture() {
  MediaProjectionManager mediaProjectionManager =
      (MediaProjectionManager) getApplication().getSystemService(
          Context.MEDIA_PROJECTION_SERVICE);
  startActivityForResult(
      mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
}
+
// Receives the screen-capture permission result and then starts the call.
// NOTE(review): super.onActivityResult() is never invoked, so other results are
// not forwarded to fragments — confirm this is intentional. `data` may be null
// when the user denies; createScreenCapturer() handles that via the result code.
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
  if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE)
    return;
  mediaProjectionPermissionResultCode = resultCode;
  mediaProjectionPermissionResultData = data;
  startCall();
}
+
+ private boolean useCamera2() {
+ return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
+ }
+
+ private boolean captureToTexture() {
+ return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
+ }
+
+ private @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
+ final String[] deviceNames = enumerator.getDeviceNames();
+
+ // First, try to find front facing camera
+ Logging.d(TAG, "Looking for front facing cameras.");
+ for (String deviceName : deviceNames) {
+ if (enumerator.isFrontFacing(deviceName)) {
+ Logging.d(TAG, "Creating front facing camera capturer.");
+ VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+ if (videoCapturer != null) {
+ return videoCapturer;
+ }
+ }
+ }
+
+ // Front facing camera not found, try something else
+ Logging.d(TAG, "Looking for other cameras.");
+ for (String deviceName : deviceNames) {
+ if (!enumerator.isFrontFacing(deviceName)) {
+ Logging.d(TAG, "Creating other camera capturer.");
+ VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+ if (videoCapturer != null) {
+ return videoCapturer;
+ }
+ }
+ }
+
+ return null;
+ }
+
// Builds a screen capturer from the MediaProjection permission captured in
// onActivityResult(); returns null (and reports an error) if permission was denied.
private @Nullable VideoCapturer createScreenCapturer() {
  if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
    reportError("User didn't give permission to capture the screen.");
    return null;
  }
  return new ScreenCapturerAndroid(
      mediaProjectionPermissionResultData, new MediaProjection.Callback() {
        @Override
        public void onStop() {
          // The user revoked the projection from the system UI.
          reportError("User revoked permission to capture the screen.");
        }
      });
}
+
// Activity interfaces
@Override
public void onStop() {
  super.onStop();
  activityRunning = false;
  // Don't stop the video when using screencapture to allow user to show other apps to the remote
  // end.
  if (peerConnectionClient != null && !screencaptureEnabled) {
    peerConnectionClient.stopVideoSource();
  }
  if (cpuMonitor != null) {
    cpuMonitor.pause();
  }
}

@Override
public void onStart() {
  super.onStart();
  activityRunning = true;
  // Video is not paused for screencapture. See onPause.
  if (peerConnectionClient != null && !screencaptureEnabled) {
    peerConnectionClient.startVideoSource();
  }
  if (cpuMonitor != null) {
    cpuMonitor.resume();
  }
}

// Tears down the whole call and removes the crash handler installed in onCreate().
@Override
protected void onDestroy() {
  Thread.setDefaultUncaughtExceptionHandler(null);
  disconnect();
  if (logToast != null) {
    logToast.cancel();
  }
  activityRunning = false;
  super.onDestroy();
}
+
// CallFragment.OnCallEvents interface implementation.
@Override
public void onCallHangUp() {
  disconnect();
}

@Override
public void onCameraSwitch() {
  if (peerConnectionClient != null) {
    peerConnectionClient.switchCamera();
  }
}

@Override
public void onVideoScalingSwitch(ScalingType scalingType) {
  fullscreenRenderer.setScalingType(scalingType);
}

@Override
public void onCaptureFormatChange(int width, int height, int framerate) {
  if (peerConnectionClient != null) {
    peerConnectionClient.changeCaptureFormat(width, height, framerate);
  }
}

// Toggles the local microphone and returns the resulting enabled state.
@Override
public boolean onToggleMic() {
  if (peerConnectionClient != null) {
    micEnabled = !micEnabled;
    peerConnectionClient.setAudioEnabled(micEnabled);
  }
  return micEnabled;
}
+
// Helper functions.
// Shows or hides the call-control and HUD fragments; no-op until connected.
private void toggleCallControlFragmentVisibility() {
  if (!connected || !callFragment.isAdded()) {
    return;
  }
  // Show/hide call control fragment
  callControlFragmentVisible = !callControlFragmentVisible;
  FragmentTransaction ft = getFragmentManager().beginTransaction();
  if (callControlFragmentVisible) {
    ft.show(callFragment);
    ft.show(hudFragment);
  } else {
    ft.hide(callFragment);
    ft.hide(hudFragment);
  }
  ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
  ft.commit();
}
+
// Connects to the signaling room and starts audio routing. The peer connection
// itself is created later, in onConnectedToRoomInternal().
private void startCall() {
  if (appRtcClient == null) {
    Log.e(TAG, "AppRTC client is not allocated for a call.");
    return;
  }
  callStartedTimeMs = System.currentTimeMillis();

  // Start room connection.
  logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
  appRtcClient.connectToRoom(roomConnectionParameters);

  // Create an audio manager that will take care of audio routing,
  // audio modes, audio device enumeration etc.
  audioManager = AppRTCAudioManager.create(getApplicationContext());
  // Store existing audio settings and change audio mode to
  // MODE_IN_COMMUNICATION for best possible VoIP performance.
  Log.d(TAG, "Starting the audio manager...");
  audioManager.start(new AudioManagerEvents() {
    // This method will be called each time the number of available audio
    // devices has changed.
    @Override
    public void onAudioDeviceChanged(
        AudioDevice audioDevice, Set<AudioDevice> availableAudioDevices) {
      onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
    }
  });
}
+
// Should be called from UI thread
// Enables stats and moves the local feed to the pip once the call is up.
private void callConnected() {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  Log.i(TAG, "Call connected: delay=" + delta + "ms");
  if (peerConnectionClient == null || isError) {
    Log.w(TAG, "Call is connected in closed or error state");
    return;
  }
  // Enable statistics callback.
  peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
  setSwappedFeeds(false /* isSwappedFeeds */);
}

// This method is called when the audio manager reports audio device change,
// e.g. from wired headset to speakerphone.
private void onAudioManagerDevicesChanged(
    final AudioDevice device, final Set<AudioDevice> availableDevices) {
  Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
      + "selected: " + device);
  // TODO(henrika): add callback handler.
}
+
// Disconnect from remote resources, dispose of local resources, and exit.
// Safe to call multiple times: every resource is nulled after release.
private void disconnect() {
  activityRunning = false;
  // Detach proxies first so no frames are delivered to renderers being released.
  remoteProxyRenderer.setTarget(null);
  localProxyVideoSink.setTarget(null);
  if (appRtcClient != null) {
    appRtcClient.disconnectFromRoom();
    appRtcClient = null;
  }
  if (pipRenderer != null) {
    pipRenderer.release();
    pipRenderer = null;
  }
  if (videoFileRenderer != null) {
    videoFileRenderer.release();
    videoFileRenderer = null;
  }
  if (fullscreenRenderer != null) {
    fullscreenRenderer.release();
    fullscreenRenderer = null;
  }
  if (peerConnectionClient != null) {
    peerConnectionClient.close();
    peerConnectionClient = null;
  }
  if (audioManager != null) {
    audioManager.stop();
    audioManager = null;
  }
  // Report success only for a clean disconnect of an established call.
  if (connected && !isError) {
    setResult(RESULT_OK);
  } else {
    setResult(RESULT_CANCELED);
  }
  finish();
}
+
// Disconnects immediately in headless/background runs; otherwise shows a modal
// error dialog and disconnects when the user acknowledges it.
private void disconnectWithErrorMessage(final String errorMessage) {
  if (commandLineRun || !activityRunning) {
    Log.e(TAG, "Critical error: " + errorMessage);
    disconnect();
  } else {
    new AlertDialog.Builder(this)
        .setTitle(getText(R.string.channel_error_title))
        .setMessage(errorMessage)
        .setCancelable(false)
        .setNeutralButton(R.string.ok,
            new DialogInterface.OnClickListener() {
              @Override
              public void onClick(DialogInterface dialog, int id) {
                dialog.cancel();
                disconnect();
              }
            })
        .create()
        .show();
  }
}
+
// Log `msg` and Toast about it.
private void logAndToast(String msg) {
  Log.d(TAG, msg);
  // Cancel any previous toast so messages don't queue up.
  if (logToast != null) {
    logToast.cancel();
  }
  logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
  logToast.show();
}

// Reports the first error only (isError latches) and routes it to the UI thread.
private void reportError(final String description) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (!isError) {
        isError = true;
        disconnectWithErrorMessage(description);
      }
    }
  });
}
+
// Chooses the video source in priority order: file-as-camera, screen capture,
// Camera2, then Camera1. Returns null (after reporting an error) on failure.
private @Nullable VideoCapturer createVideoCapturer() {
  final VideoCapturer videoCapturer;
  String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
  if (videoFileAsCamera != null) {
    try {
      videoCapturer = new FileVideoCapturer(videoFileAsCamera);
    } catch (IOException e) {
      reportError("Failed to open video file for emulated camera");
      return null;
    }
  } else if (screencaptureEnabled) {
    return createScreenCapturer();
  } else if (useCamera2()) {
    // Camera2 capture in this app requires texture mode.
    if (!captureToTexture()) {
      reportError(getString(R.string.camera2_texture_only_error));
      return null;
    }

    Logging.d(TAG, "Creating capturer using camera2 API.");
    videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
  } else {
    Logging.d(TAG, "Creating capturer using camera1 API.");
    videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
  }
  if (videoCapturer == null) {
    reportError("Failed to open camera");
    return null;
  }
  return videoCapturer;
}
+
// Routes local/remote video to the fullscreen or pip renderer. When
// isSwappedFeeds is true the local feed is shown fullscreen; the local feed is
// the mirrored one in either layout.
private void setSwappedFeeds(boolean isSwappedFeeds) {
  Logging.d(TAG, "setSwappedFeeds: " + isSwappedFeeds);
  this.isSwappedFeeds = isSwappedFeeds;
  localProxyVideoSink.setTarget(isSwappedFeeds ? fullscreenRenderer : pipRenderer);
  remoteProxyRenderer.setTarget(isSwappedFeeds ? pipRenderer : fullscreenRenderer);
  fullscreenRenderer.setMirror(isSwappedFeeds);
  pipRenderer.setMirror(!isSwappedFeeds);
}
+
// -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
// All callbacks are invoked from websocket signaling looper thread and
// are routed to UI thread.
// Creates the peer connection once the room is joined, then either sends an
// offer (initiator) or applies the room's pending offer/candidates (answerer).
private void onConnectedToRoomInternal(final SignalingParameters params) {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;

  signalingParameters = params;
  logAndToast("Creating peer connection, delay=" + delta + "ms");
  VideoCapturer videoCapturer = null;
  if (peerConnectionParameters.videoCallEnabled) {
    videoCapturer = createVideoCapturer();
  }
  peerConnectionClient.createPeerConnection(
      localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);

  if (signalingParameters.initiator) {
    logAndToast("Creating OFFER...");
    // Create offer. Offer SDP will be sent to answering client in
    // PeerConnectionEvents.onLocalDescription event.
    peerConnectionClient.createOffer();
  } else {
    if (params.offerSdp != null) {
      peerConnectionClient.setRemoteDescription(params.offerSdp);
      logAndToast("Creating ANSWER...");
      // Create answer. Answer SDP will be sent to offering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createAnswer();
    }
    if (params.iceCandidates != null) {
      // Add remote ICE candidates from room.
      for (IceCandidate iceCandidate : params.iceCandidates) {
        peerConnectionClient.addRemoteIceCandidate(iceCandidate);
      }
    }
  }
}

@Override
public void onConnectedToRoom(final SignalingParameters params) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      onConnectedToRoomInternal(params);
    }
  });
}
+
+ @Override
+ public void onRemoteDescription(final SessionDescription desc) {
+ final long delta = System.currentTimeMillis() - callStartedTimeMs;
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnectionClient == null) {
+ Log.e(TAG, "Received remote SDP for non-initilized peer connection.");
+ return;
+ }
+ logAndToast("Received remote " + desc.type + ", delay=" + delta + "ms");
+ peerConnectionClient.setRemoteDescription(desc);
+ if (!signalingParameters.initiator) {
+ logAndToast("Creating ANSWER...");
+ // Create answer. Answer SDP will be sent to offering client in
+ // PeerConnectionEvents.onLocalDescription event.
+ peerConnectionClient.createAnswer();
+ }
+ }
+ });
+ }
+
// Adds a remote ICE candidate on the UI thread, ignoring it if the peer
// connection is already gone.
@Override
public void onRemoteIceCandidate(final IceCandidate candidate) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (peerConnectionClient == null) {
        Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
        return;
      }
      peerConnectionClient.addRemoteIceCandidate(candidate);
    }
  });
}

@Override
public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (peerConnectionClient == null) {
        Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
        return;
      }
      peerConnectionClient.removeRemoteIceCandidates(candidates);
    }
  });
}

// Remote side closed the signaling channel: treat as hangup.
@Override
public void onChannelClose() {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("Remote end hung up; dropping PeerConnection");
      disconnect();
    }
  });
}

@Override
public void onChannelError(final String description) {
  reportError(description);
}
+
// -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
// Send local peer connection SDP and ICE candidates to remote party.
// All callbacks are invoked from peer connection client looper thread and
// are routed to UI thread.
@Override
public void onLocalDescription(final SessionDescription desc) {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient != null) {
        logAndToast("Sending " + desc.type + ", delay=" + delta + "ms");
        if (signalingParameters.initiator) {
          appRtcClient.sendOfferSdp(desc);
        } else {
          appRtcClient.sendAnswerSdp(desc);
        }
      }
      // NOTE(review): peerConnectionClient is not null-checked here, unlike the
      // other callbacks — confirm it cannot be cleared before this runs.
      if (peerConnectionParameters.videoMaxBitrate > 0) {
        Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
        peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
      }
    }
  });
}

@Override
public void onIceCandidate(final IceCandidate candidate) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient != null) {
        appRtcClient.sendLocalIceCandidate(candidate);
      }
    }
  });
}

@Override
public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient != null) {
        appRtcClient.sendLocalIceCandidateRemovals(candidates);
      }
    }
  });
}
+
@Override
public void onIceConnected() {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("ICE connected, delay=" + delta + "ms");
    }
  });
}

@Override
public void onIceDisconnected() {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("ICE disconnected");
    }
  });
}

// DTLS is up: the call is considered established here (not at ICE connect).
@Override
public void onConnected() {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("DTLS connected, delay=" + delta + "ms");
      connected = true;
      callConnected();
    }
  });
}

@Override
public void onDisconnected() {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("DTLS disconnected");
      connected = false;
      disconnect();
    }
  });
}
+
@Override
public void onPeerConnectionClosed() {}

// Forwards periodic stats to the HUD while the call is healthy.
@Override
public void onPeerConnectionStatsReady(final RTCStatsReport report) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (!isError && connected) {
        hudFragment.updateEncoderStatistics(report);
      }
    }
  });
}

@Override
public void onPeerConnectionError(final String description) {
  reportError(description);
}
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
new file mode 100644
index 0000000000..0d8bdaa06f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.SeekBar;
+import android.widget.TextView;
+
+import org.webrtc.RendererCommon.ScalingType;
+
+/**
+ * Fragment for call control.
+ */
public class CallFragment extends Fragment {
  private TextView contactView;
  private ImageButton cameraSwitchButton;
  private ImageButton videoScalingButton;
  private ImageButton toggleMuteButton;
  private TextView captureFormatText;
  private SeekBar captureFormatSlider;
  // The hosting activity; assigned in onAttach(), so it is non-null before any click.
  private OnCallEvents callEvents;
  private ScalingType scalingType;
  private boolean videoCallEnabled = true;

  /**
   * Call control interface for container activity.
   */
  public interface OnCallEvents {
    void onCallHangUp();
    void onCameraSwitch();
    void onVideoScalingSwitch(ScalingType scalingType);
    void onCaptureFormatChange(int width, int height, int framerate);
    boolean onToggleMic();
  }

  // Inflates the control bar and wires all button handlers to callEvents.
  @Override
  public View onCreateView(
      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View controlView = inflater.inflate(R.layout.fragment_call, container, false);

    // Create UI controls.
    contactView = controlView.findViewById(R.id.contact_name_call);
    ImageButton disconnectButton = controlView.findViewById(R.id.button_call_disconnect);
    cameraSwitchButton = controlView.findViewById(R.id.button_call_switch_camera);
    videoScalingButton = controlView.findViewById(R.id.button_call_scaling_mode);
    toggleMuteButton = controlView.findViewById(R.id.button_call_toggle_mic);
    captureFormatText = controlView.findViewById(R.id.capture_format_text_call);
    captureFormatSlider = controlView.findViewById(R.id.capture_format_slider_call);

    // Add buttons click events.
    disconnectButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        callEvents.onCallHangUp();
      }
    });

    cameraSwitchButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        callEvents.onCameraSwitch();
      }
    });

    // Toggles between aspect-fit and aspect-fill, updating the button icon to
    // show the action a further click would take.
    videoScalingButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
          videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
          scalingType = ScalingType.SCALE_ASPECT_FIT;
        } else {
          videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
          scalingType = ScalingType.SCALE_ASPECT_FILL;
        }
        callEvents.onVideoScalingSwitch(scalingType);
      }
    });
    scalingType = ScalingType.SCALE_ASPECT_FILL;

    // Dim the mic button while muted.
    toggleMuteButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        boolean enabled = callEvents.onToggleMic();
        toggleMuteButton.setAlpha(enabled ? 1.0f : 0.3f);
      }
    });

    return controlView;
  }

  // Applies the fragment arguments (room name, video-call mode, capture-quality
  // slider availability) each time the fragment becomes visible.
  @Override
  public void onStart() {
    super.onStart();

    boolean captureSliderEnabled = false;
    Bundle args = getArguments();
    if (args != null) {
      String contactName = args.getString(CallActivity.EXTRA_ROOMID);
      contactView.setText(contactName);
      videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
      captureSliderEnabled = videoCallEnabled
          && args.getBoolean(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, false);
    }
    if (!videoCallEnabled) {
      cameraSwitchButton.setVisibility(View.INVISIBLE);
    }
    if (captureSliderEnabled) {
      captureFormatSlider.setOnSeekBarChangeListener(
          new CaptureQualityController(captureFormatText, callEvents));
    } else {
      captureFormatText.setVisibility(View.GONE);
      captureFormatSlider.setVisibility(View.GONE);
    }
  }

  // TODO(sakal): Replace with onAttach(Context) once we only support API level 23+.
  @SuppressWarnings("deprecation")
  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);
    callEvents = (OnCallEvents) activity;
  }
}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
new file mode 100644
index 0000000000..8a783eca9c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.widget.SeekBar;
+import android.widget.TextView;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+/**
+ * Control capture format based on a seekbar listener.
+ */
+public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
+ private final List<CaptureFormat> formats =
+ Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
+ new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
+ new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
+ // Prioritize framerate below this threshold and resolution above the threshold.
+ private static final int FRAMERATE_THRESHOLD = 15;
+ private TextView captureFormatText;
+ private CallFragment.OnCallEvents callEvents;
+ private int width;
+ private int height;
+ private int framerate;
+ private double targetBandwidth;
+
+ public CaptureQualityController(
+ TextView captureFormatText, CallFragment.OnCallEvents callEvents) {
+ this.captureFormatText = captureFormatText;
+ this.callEvents = callEvents;
+ }
+
+ private final Comparator<CaptureFormat> compareFormats = new Comparator<CaptureFormat>() {
+ @Override
+ public int compare(CaptureFormat first, CaptureFormat second) {
+ int firstFps = calculateFramerate(targetBandwidth, first);
+ int secondFps = calculateFramerate(targetBandwidth, second);
+
+ if ((firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD)
+ || firstFps == secondFps) {
+ // Compare resolution.
+ return first.width * first.height - second.width * second.height;
+ } else {
+ // Compare fps.
+ return firstFps - secondFps;
+ }
+ }
+ };
+
+ @Override
+ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+ if (progress == 0) {
+ width = 0;
+ height = 0;
+ framerate = 0;
+ captureFormatText.setText(R.string.muted);
+ return;
+ }
+
+ // Extract max bandwidth (in millipixels / second).
+ long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
+ for (CaptureFormat format : formats) {
+ maxCaptureBandwidth =
+ Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
+ }
+
+ // Fraction between 0 and 1.
+ double bandwidthFraction = (double) progress / 100.0;
+ // Make a log-scale transformation, still between 0 and 1.
+ final double kExpConstant = 3.0;
+ bandwidthFraction =
+ (Math.exp(kExpConstant * bandwidthFraction) - 1) / (Math.exp(kExpConstant) - 1);
+ targetBandwidth = bandwidthFraction * maxCaptureBandwidth;
+
+ // Choose the best format given a target bandwidth.
+ final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
+ width = bestFormat.width;
+ height = bestFormat.height;
+ framerate = calculateFramerate(targetBandwidth, bestFormat);
+ captureFormatText.setText(
+ String.format(captureFormatText.getContext().getString(R.string.format_description), width,
+ height, framerate));
+ }
+
+ @Override
+ public void onStartTrackingTouch(SeekBar seekBar) {}
+
+ @Override
+ public void onStopTrackingTouch(SeekBar seekBar) {
+ callEvents.onCaptureFormatChange(width, height, framerate);
+ }
+
+ // Return the highest frame rate possible based on bandwidth and format.
+ private int calculateFramerate(double bandwidth, CaptureFormat format) {
+ return (int) Math.round(
+ Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
+ / 1000.0);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
new file mode 100644
index 0000000000..7206c88498
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
@@ -0,0 +1,666 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.SharedPreferences;
+import android.content.pm.PackageInfo;
+import android.content.pm.PackageManager;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.preference.PreferenceManager;
+import android.util.Log;
+import android.view.ContextMenu;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuItem;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.view.inputmethod.EditorInfo;
+import android.webkit.URLUtil;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.EditText;
+import android.widget.ImageButton;
+import android.widget.ListView;
+import android.widget.TextView;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Random;
+import org.json.JSONArray;
+import org.json.JSONException;
+
+/**
+ * Handles the initial setup where the user selects which room to join.
+ */
+public class ConnectActivity extends Activity {
+  private static final String TAG = "ConnectActivity";
+  // startActivityForResult request code used when launching CallActivity.
+  private static final int CONNECTION_REQUEST = 1;
+  // Request code for the runtime-permission prompt (Android M+).
+  private static final int PERMISSION_REQUEST = 2;
+  // Position of the "remove favorite" item in the room-list context menu.
+  private static final int REMOVE_FAVORITE_INDEX = 0;
+  // Static so the "launched from the command line / an intent" state survives
+  // activity recreation; checked in onActivityResult to propagate the call result.
+  private static boolean commandLineRun;
+
+  private ImageButton addFavoriteButton;
+  private EditText roomEditText;
+  private ListView roomListView;
+  private SharedPreferences sharedPref;
+  // Shared-preference key names, resolved from string resources in onCreate().
+  private String keyprefResolution;
+  private String keyprefFps;
+  private String keyprefVideoBitrateType;
+  private String keyprefVideoBitrateValue;
+  private String keyprefAudioBitrateType;
+  private String keyprefAudioBitrateValue;
+  private String keyprefRoomServerUrl;
+  private String keyprefRoom;
+  private String keyprefRoomList;
+  // Favorite rooms; persisted as a JSON array string in shared preferences.
+  private ArrayList<String> roomList;
+  private ArrayAdapter<String> adapter;
+
+  @Override
+  public void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+
+    // Get setting keys.
+    PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
+    sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
+    keyprefResolution = getString(R.string.pref_resolution_key);
+    keyprefFps = getString(R.string.pref_fps_key);
+    keyprefVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
+    keyprefVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
+    keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
+    keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
+    keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
+    keyprefRoom = getString(R.string.pref_room_key);
+    keyprefRoomList = getString(R.string.pref_room_list_key);
+
+    setContentView(R.layout.activity_connect);
+
+    roomEditText = findViewById(R.id.room_edittext);
+    // Pressing "Done" on the soft keyboard behaves like tapping the add-favorite button.
+    roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
+      @Override
+      public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
+        if (i == EditorInfo.IME_ACTION_DONE) {
+          addFavoriteButton.performClick();
+          return true;
+        }
+        return false;
+      }
+    });
+    roomEditText.requestFocus();
+
+    roomListView = findViewById(R.id.room_listview);
+    roomListView.setEmptyView(findViewById(android.R.id.empty));
+    roomListView.setOnItemClickListener(roomListClickListener);
+    // Long-press on a room offers the remove-favorite context menu.
+    registerForContextMenu(roomListView);
+    ImageButton connectButton = findViewById(R.id.connect_button);
+    connectButton.setOnClickListener(connectListener);
+    addFavoriteButton = findViewById(R.id.add_favorite_button);
+    addFavoriteButton.setOnClickListener(addFavoriteListener);
+
+    // May immediately continue into onPermissionsGranted() pre-M or when everything
+    // is already granted.
+    requestPermissions();
+  }
+
+  @Override
+  public boolean onCreateOptionsMenu(Menu menu) {
+    getMenuInflater().inflate(R.menu.connect_menu, menu);
+    return true;
+  }
+
+  @Override
+  public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
+    if (v.getId() == R.id.room_listview) {
+      AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo;
+      menu.setHeaderTitle(roomList.get(info.position));
+      // Menu item ids are the array indices, matched against REMOVE_FAVORITE_INDEX in
+      // onContextItemSelected.
+      String[] menuItems = getResources().getStringArray(R.array.roomListContextMenu);
+      for (int i = 0; i < menuItems.length; i++) {
+        menu.add(Menu.NONE, i, i, menuItems[i]);
+      }
+    } else {
+      super.onCreateContextMenu(menu, v, menuInfo);
+    }
+  }
+
+  @Override
+  public boolean onContextItemSelected(MenuItem item) {
+    if (item.getItemId() == REMOVE_FAVORITE_INDEX) {
+      AdapterView.AdapterContextMenuInfo info =
+          (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
+      roomList.remove(info.position);
+      adapter.notifyDataSetChanged();
+      return true;
+    }
+
+    return super.onContextItemSelected(item);
+  }
+
+  @Override
+  public boolean onOptionsItemSelected(MenuItem item) {
+    // Handle presses on the action bar items.
+    if (item.getItemId() == R.id.action_settings) {
+      Intent intent = new Intent(this, SettingsActivity.class);
+      startActivity(intent);
+      return true;
+    } else if (item.getItemId() == R.id.action_loopback) {
+      // Loopback call: no room id needed, connectToRoom generates a random one.
+      connectToRoom(null, false, true, false, 0);
+      return true;
+    } else {
+      return super.onOptionsItemSelected(item);
+    }
+  }
+
+  @Override
+  public void onPause() {
+    super.onPause();
+    // Persist the typed room and the favorites list (as JSON) across restarts.
+    String room = roomEditText.getText().toString();
+    String roomListJson = new JSONArray(roomList).toString();
+    SharedPreferences.Editor editor = sharedPref.edit();
+    editor.putString(keyprefRoom, room);
+    editor.putString(keyprefRoomList, roomListJson);
+    // NOTE(review): commit() writes synchronously on the UI thread; apply() would be
+    // the non-blocking alternative -- kept as-is to match upstream.
+    editor.commit();
+  }
+
+  @Override
+  public void onResume() {
+    super.onResume();
+    // Restore the last room and the favorites list saved in onPause().
+    String room = sharedPref.getString(keyprefRoom, "");
+    roomEditText.setText(room);
+    roomList = new ArrayList<>();
+    String roomListJson = sharedPref.getString(keyprefRoomList, null);
+    if (roomListJson != null) {
+      try {
+        JSONArray jsonArray = new JSONArray(roomListJson);
+        for (int i = 0; i < jsonArray.length(); i++) {
+          roomList.add(jsonArray.get(i).toString());
+        }
+      } catch (JSONException e) {
+        // A corrupt list is dropped rather than crashing; the user just loses favorites.
+        Log.e(TAG, "Failed to load room list: " + e.toString());
+      }
+    }
+    adapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, roomList);
+    roomListView.setAdapter(adapter);
+    if (adapter.getCount() > 0) {
+      roomListView.requestFocus();
+      roomListView.setItemChecked(0, true);
+    }
+  }
+
+  @Override
+  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+    // For command-line (intent-driven) runs, forward CallActivity's result to the
+    // caller and exit; interactive runs simply return to this screen.
+    if (requestCode == CONNECTION_REQUEST && commandLineRun) {
+      Log.d(TAG, "Return: " + resultCode);
+      setResult(resultCode);
+      commandLineRun = false;
+      finish();
+    }
+  }
+
+  @Override
+  public void onRequestPermissionsResult(
+      int requestCode, String[] permissions, int[] grantResults) {
+    if (requestCode == PERMISSION_REQUEST) {
+      // Re-derive the missing set rather than trusting grantResults directly.
+      String[] missingPermissions = getMissingPermissions();
+      if (missingPermissions.length != 0) {
+        // User didn't grant all the permissions. Warn that the application might not work
+        // correctly.
+        new AlertDialog.Builder(this)
+            .setMessage(R.string.missing_permissions_try_again)
+            .setPositiveButton(R.string.yes,
+                (dialog, id) -> {
+                  // User wants to try giving the permissions again.
+                  dialog.cancel();
+                  requestPermissions();
+                })
+            .setNegativeButton(R.string.no,
+                (dialog, id) -> {
+                  // User doesn't want to give the permissions.
+                  dialog.cancel();
+                  onPermissionsGranted();
+                })
+            .show();
+      } else {
+        // All permissions granted.
+        onPermissionsGranted();
+      }
+    }
+  }
+
+  private void onPermissionsGranted() {
+    // If an implicit VIEW intent is launching the app, go directly to that URL.
+    final Intent intent = getIntent();
+    if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
+      boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
+      int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
+      boolean useValuesFromIntent =
+          intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
+      String room = sharedPref.getString(keyprefRoom, "");
+      connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
+    }
+  }
+
+  @TargetApi(Build.VERSION_CODES.M)
+  private void requestPermissions() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      // Dynamic permissions are not required before Android M.
+      onPermissionsGranted();
+      return;
+    }
+
+    String[] missingPermissions = getMissingPermissions();
+    if (missingPermissions.length != 0) {
+      // Result arrives in onRequestPermissionsResult.
+      requestPermissions(missingPermissions, PERMISSION_REQUEST);
+    } else {
+      onPermissionsGranted();
+    }
+  }
+
+  // Returns the manifest-declared permissions not yet granted; empty pre-M or on
+  // any failure to read the package info.
+  @TargetApi(Build.VERSION_CODES.M)
+  private String[] getMissingPermissions() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      return new String[0];
+    }
+
+    PackageInfo info;
+    try {
+      info = getPackageManager().getPackageInfo(getPackageName(), PackageManager.GET_PERMISSIONS);
+    } catch (PackageManager.NameNotFoundException e) {
+      Log.w(TAG, "Failed to retrieve permissions.");
+      return new String[0];
+    }
+
+    if (info.requestedPermissions == null) {
+      Log.w(TAG, "No requested permissions.");
+      return new String[0];
+    }
+
+    ArrayList<String> missingPermissions = new ArrayList<>();
+    for (int i = 0; i < info.requestedPermissions.length; i++) {
+      if ((info.requestedPermissionsFlags[i] & PackageInfo.REQUESTED_PERMISSION_GRANTED) == 0) {
+        missingPermissions.add(info.requestedPermissions[i]);
+      }
+    }
+    Log.d(TAG, "Missing permissions: " + missingPermissions);
+
+    return missingPermissions.toArray(new String[missingPermissions.size()]);
+  }
+
+  /**
+   * Get a value from the shared preference or from the intent, if it does not
+   * exist the default is used.
+   */
+  @Nullable
+  private String sharedPrefGetString(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    String defaultValue = getString(defaultId);
+    if (useFromIntent) {
+      String value = getIntent().getStringExtra(intentName);
+      if (value != null) {
+        return value;
+      }
+      return defaultValue;
+    } else {
+      String attributeName = getString(attributeId);
+      return sharedPref.getString(attributeName, defaultValue);
+    }
+  }
+
+  /**
+   * Get a value from the shared preference or from the intent, if it does not
+   * exist the default is used.
+   */
+  private boolean sharedPrefGetBoolean(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    boolean defaultValue = Boolean.parseBoolean(getString(defaultId));
+    if (useFromIntent) {
+      return getIntent().getBooleanExtra(intentName, defaultValue);
+    } else {
+      String attributeName = getString(attributeId);
+      return sharedPref.getBoolean(attributeName, defaultValue);
+    }
+  }
+
+  /**
+   * Get a value from the shared preference or from the intent, if it does not
+   * exist the default is used.
+   */
+  private int sharedPrefGetInteger(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    String defaultString = getString(defaultId);
+    int defaultValue = Integer.parseInt(defaultString);
+    if (useFromIntent) {
+      return getIntent().getIntExtra(intentName, defaultValue);
+    } else {
+      String attributeName = getString(attributeId);
+      String value = sharedPref.getString(attributeName, defaultString);
+      try {
+        return Integer.parseInt(value);
+      } catch (NumberFormatException e) {
+        // Fall back to the resource default on a malformed preference value.
+        Log.e(TAG, "Wrong setting for: " + attributeName + ":" + value);
+        return defaultValue;
+      }
+    }
+  }
+
+  /**
+   * Gathers every call setting (from shared preferences, or from the launching
+   * intent when useValuesFromIntent is set), validates the room server URL, and
+   * starts CallActivity for the given room. A random roomId is generated for
+   * loopback calls.
+   */
+  @SuppressWarnings("StringSplitter")
+  private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
+      boolean useValuesFromIntent, int runTimeMs) {
+    ConnectActivity.commandLineRun = commandLineRun;
+
+    // roomId is random for loopback.
+    if (loopback) {
+      roomId = Integer.toString((new Random()).nextInt(100000000));
+    }
+
+    String roomUrl = sharedPref.getString(
+        keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
+
+    // Video call enabled flag.
+    boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
+        CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
+
+    // Use screencapture option.
+    boolean useScreencapture = sharedPrefGetBoolean(R.string.pref_screencapture_key,
+        CallActivity.EXTRA_SCREENCAPTURE, R.string.pref_screencapture_default, useValuesFromIntent);
+
+    // Use Camera2 option.
+    boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
+        R.string.pref_camera2_default, useValuesFromIntent);
+
+    // Get default codecs.
+    String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
+        CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
+    String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
+        CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
+
+    // Check HW codec flag.
+    boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
+        CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
+
+    // Check Capture to texture.
+    boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
+        CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
+        useValuesFromIntent);
+
+    // Check FlexFEC.
+    boolean flexfecEnabled = sharedPrefGetBoolean(R.string.pref_flexfec_key,
+        CallActivity.EXTRA_FLEXFEC_ENABLED, R.string.pref_flexfec_default, useValuesFromIntent);
+
+    // Check Disable Audio Processing flag.
+    boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
+        CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
+        useValuesFromIntent);
+
+    boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
+        CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
+
+    boolean saveInputAudioToFile =
+        sharedPrefGetBoolean(R.string.pref_enable_save_input_audio_to_file_key,
+            CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED,
+            R.string.pref_enable_save_input_audio_to_file_default, useValuesFromIntent);
+
+    // Check OpenSL ES enabled flag.
+    boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
+        CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
+
+    // Check Disable built-in AEC flag.
+    boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
+        CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
+        useValuesFromIntent);
+
+    // Check Disable built-in AGC flag.
+    boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
+        CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
+        useValuesFromIntent);
+
+    // Check Disable built-in NS flag.
+    boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
+        CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
+        useValuesFromIntent);
+
+    // Check Disable gain control
+    // NOTE(review): every sibling call passes a *_default resource as the third argument,
+    // but this one passes pref_disable_webrtc_agc_and_hpf_key again; Boolean.parseBoolean
+    // of the key string yields false, which may or may not be the intended default --
+    // confirm against the preferences XML before changing (kept as-is to match upstream).
+    boolean disableWebRtcAGCAndHPF = sharedPrefGetBoolean(
+        R.string.pref_disable_webrtc_agc_and_hpf_key, CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF,
+        R.string.pref_disable_webrtc_agc_and_hpf_key, useValuesFromIntent);
+
+    // Get video resolution from settings.
+    int videoWidth = 0;
+    int videoHeight = 0;
+    if (useValuesFromIntent) {
+      videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
+      videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
+    }
+    if (videoWidth == 0 && videoHeight == 0) {
+      // Preference stores resolution as e.g. "1280 x 720".
+      String resolution =
+          sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
+      String[] dimensions = resolution.split("[ x]+");
+      if (dimensions.length == 2) {
+        try {
+          videoWidth = Integer.parseInt(dimensions[0]);
+          videoHeight = Integer.parseInt(dimensions[1]);
+        } catch (NumberFormatException e) {
+          videoWidth = 0;
+          videoHeight = 0;
+          Log.e(TAG, "Wrong video resolution setting: " + resolution);
+        }
+      }
+    }
+
+    // Get camera fps from settings.
+    int cameraFps = 0;
+    if (useValuesFromIntent) {
+      cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
+    }
+    if (cameraFps == 0) {
+      String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
+      String[] fpsValues = fps.split("[ x]+");
+      if (fpsValues.length == 2) {
+        try {
+          cameraFps = Integer.parseInt(fpsValues[0]);
+        } catch (NumberFormatException e) {
+          cameraFps = 0;
+          Log.e(TAG, "Wrong camera fps setting: " + fps);
+        }
+      }
+    }
+
+    // Check capture quality slider flag.
+    boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
+        CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
+        R.string.pref_capturequalityslider_default, useValuesFromIntent);
+
+    // Get video and audio start bitrate.
+    int videoStartBitrate = 0;
+    if (useValuesFromIntent) {
+      videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
+    }
+    if (videoStartBitrate == 0) {
+      // A bitrate value is only read when the type preference differs from its default.
+      String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
+      String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
+      if (!bitrateType.equals(bitrateTypeDefault)) {
+        String bitrateValue = sharedPref.getString(
+            keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
+        videoStartBitrate = Integer.parseInt(bitrateValue);
+      }
+    }
+
+    int audioStartBitrate = 0;
+    if (useValuesFromIntent) {
+      audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
+    }
+    if (audioStartBitrate == 0) {
+      String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
+      String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
+      if (!bitrateType.equals(bitrateTypeDefault)) {
+        String bitrateValue = sharedPref.getString(
+            keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
+        audioStartBitrate = Integer.parseInt(bitrateValue);
+      }
+    }
+
+    // Check statistics display option.
+    boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
+        CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
+
+    boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
+        R.string.pref_tracing_default, useValuesFromIntent);
+
+    // Check Enable RtcEventLog.
+    boolean rtcEventLogEnabled = sharedPrefGetBoolean(R.string.pref_enable_rtceventlog_key,
+        CallActivity.EXTRA_ENABLE_RTCEVENTLOG, R.string.pref_enable_rtceventlog_default,
+        useValuesFromIntent);
+
+    // Get datachannel options
+    boolean dataChannelEnabled = sharedPrefGetBoolean(R.string.pref_enable_datachannel_key,
+        CallActivity.EXTRA_DATA_CHANNEL_ENABLED, R.string.pref_enable_datachannel_default,
+        useValuesFromIntent);
+    boolean ordered = sharedPrefGetBoolean(R.string.pref_ordered_key, CallActivity.EXTRA_ORDERED,
+        R.string.pref_ordered_default, useValuesFromIntent);
+    boolean negotiated = sharedPrefGetBoolean(R.string.pref_negotiated_key,
+        CallActivity.EXTRA_NEGOTIATED, R.string.pref_negotiated_default, useValuesFromIntent);
+    int maxRetrMs = sharedPrefGetInteger(R.string.pref_max_retransmit_time_ms_key,
+        CallActivity.EXTRA_MAX_RETRANSMITS_MS, R.string.pref_max_retransmit_time_ms_default,
+        useValuesFromIntent);
+    int maxRetr =
+        sharedPrefGetInteger(R.string.pref_max_retransmits_key, CallActivity.EXTRA_MAX_RETRANSMITS,
+            R.string.pref_max_retransmits_default, useValuesFromIntent);
+    int id = sharedPrefGetInteger(R.string.pref_data_id_key, CallActivity.EXTRA_ID,
+        R.string.pref_data_id_default, useValuesFromIntent);
+    String protocol = sharedPrefGetString(R.string.pref_data_protocol_key,
+        CallActivity.EXTRA_PROTOCOL, R.string.pref_data_protocol_default, useValuesFromIntent);
+
+    // Start AppRTCMobile activity.
+    Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
+    if (validateUrl(roomUrl)) {
+      Uri uri = Uri.parse(roomUrl);
+      Intent intent = new Intent(this, CallActivity.class);
+      intent.setData(uri);
+      intent.putExtra(CallActivity.EXTRA_ROOMID, roomId);
+      intent.putExtra(CallActivity.EXTRA_LOOPBACK, loopback);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_CALL, videoCallEnabled);
+      intent.putExtra(CallActivity.EXTRA_SCREENCAPTURE, useScreencapture);
+      intent.putExtra(CallActivity.EXTRA_CAMERA2, useCamera2);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
+      intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
+      intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
+      intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
+      intent.putExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, flexfecEnabled);
+      intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
+      intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
+      intent.putExtra(CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, saveInputAudioToFile);
+      intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
+      intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
+      intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, disableBuiltInAGC);
+      intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, disableBuiltInNS);
+      intent.putExtra(CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, disableWebRtcAGCAndHPF);
+      intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
+      intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
+      intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
+      intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
+      intent.putExtra(CallActivity.EXTRA_ENABLE_RTCEVENTLOG, rtcEventLogEnabled);
+      intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
+      intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
+      intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, dataChannelEnabled);
+
+      if (dataChannelEnabled) {
+        intent.putExtra(CallActivity.EXTRA_ORDERED, ordered);
+        intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS_MS, maxRetrMs);
+        intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS, maxRetr);
+        intent.putExtra(CallActivity.EXTRA_PROTOCOL, protocol);
+        intent.putExtra(CallActivity.EXTRA_NEGOTIATED, negotiated);
+        intent.putExtra(CallActivity.EXTRA_ID, id);
+      }
+
+      // Optional file-based camera / remote-video-recording extras are only forwarded
+      // when the launching intent explicitly supplied them.
+      if (useValuesFromIntent) {
+        if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
+          String videoFileAsCamera =
+              getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
+          intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
+        }
+
+        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
+          String saveRemoteVideoToFile =
+              getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
+          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
+        }
+
+        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
+          int videoOutWidth =
+              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
+          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
+        }
+
+        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
+          int videoOutHeight =
+              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
+          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
+        }
+      }
+
+      startActivityForResult(intent, CONNECTION_REQUEST);
+    }
+  }
+
+  // Returns true for http/https URLs; otherwise shows an error dialog and returns false.
+  private boolean validateUrl(String url) {
+    if (URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url)) {
+      return true;
+    }
+
+    new AlertDialog.Builder(this)
+        .setTitle(getText(R.string.invalid_url_title))
+        .setMessage(getString(R.string.invalid_url_text, url))
+        .setCancelable(false)
+        .setNeutralButton(R.string.ok,
+            new DialogInterface.OnClickListener() {
+              @Override
+              public void onClick(DialogInterface dialog, int id) {
+                dialog.cancel();
+              }
+            })
+        .create()
+        .show();
+    return false;
+  }
+
+  // Tapping a favorite room connects to it immediately.
+  private final AdapterView.OnItemClickListener roomListClickListener =
+      new AdapterView.OnItemClickListener() {
+        @Override
+        public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
+          String roomId = ((TextView) view).getText().toString();
+          connectToRoom(roomId, false, false, false, 0);
+        }
+      };
+
+  // Adds the typed room to the favorites list, skipping blanks and duplicates.
+  private final OnClickListener addFavoriteListener = new OnClickListener() {
+    @Override
+    public void onClick(View view) {
+      String newRoom = roomEditText.getText().toString();
+      if (newRoom.length() > 0 && !roomList.contains(newRoom)) {
+        adapter.add(newRoom);
+        adapter.notifyDataSetChanged();
+      }
+    }
+  };
+
+  // Connects to whatever room name is currently in the edit box.
+  private final OnClickListener connectListener = new OnClickListener() {
+    @Override
+    public void onClick(View view) {
+      connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
+    }
+  };
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
new file mode 100644
index 0000000000..1c64621864
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
@@ -0,0 +1,521 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.BatteryManager;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.Scanner;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Simple CPU monitor. The caller creates a CpuMonitor object which can then
+ * be used via sampleCpuUtilization() to collect the percentual use of the
+ * cumulative CPU capacity for all CPUs running at their nominal frequency. 3
+ * values are generated: (1) getCpuCurrent() returns the use since the last
+ * sampleCpuUtilization(), (2) getCpuAvg3() returns the use since 3 prior
+ * calls, and (3) getCpuAvgAll() returns the use over all SAMPLE_SAVE_NUMBER
+ * calls.
+ *
+ * <p>CPUs in Android are often "offline", and while this of course means 0 Hz
+ * as current frequency, in this state we cannot even get their nominal
+ * frequency. We therefore tread carefully, and allow any CPU to be missing.
+ * Missing CPUs are assumed to have the same nominal frequency as any close
+ * lower-numbered CPU, but as soon as it is online, we'll get their proper
+ * frequency and remember it. (Since CPU 0 in practice always seem to be
+ * online, this unidirectional frequency inheritance should be no problem in
+ * practice.)
+ *
+ * <p>Caveats:
+ * o No provision made for zany "turbo" mode, common in the x86 world.
+ * o No provision made for ARM big.LITTLE; if CPU n can switch behind our
+ * back, we might get incorrect estimates.
+ * o This is not thread-safe. To call asynchronously, create different
+ * CpuMonitor objects.
+ *
+ * <p>If we can gather enough info to generate a sensible result,
+ * sampleCpuUtilization returns true. It is designed to never throw an
+ * exception.
+ *
+ * <p>sampleCpuUtilization should not be called too often in its present form,
+ * since then deltas would be small and the percent values would fluctuate and
+ * be unreadable. If it is desirable to call it more often than say once per
+ * second, one would need to increase SAMPLE_SAVE_NUMBER and probably use
+ * Queue<Integer> to avoid copying overhead.
+ *
+ * <p>Known problems:
+ * 1. Nexus 7 devices running Kitkat have a kernel which often output an
+ * incorrect 'idle' field in /proc/stat. The value is close to twice the
+ * correct value, and then returns to back to correct reading. Both when
+ * jumping up and back down we might create faulty CPU load readings.
+ */
+class CpuMonitor {
+  private static final String TAG = "CpuMonitor";
+  // Window length for all moving averages below.
+  private static final int MOVING_AVERAGE_SAMPLES = 5;
+
+  // How often sampleCpuUtilization() runs, and how often its result is logged.
+  private static final int CPU_STAT_SAMPLE_PERIOD_MS = 2000;
+  private static final int CPU_STAT_LOG_PERIOD_MS = 6000;
+
+  private final Context appContext;
+  // User CPU usage at current frequency.
+  private final MovingAverage userCpuUsage;
+  // System CPU usage at current frequency.
+  private final MovingAverage systemCpuUsage;
+  // Total CPU usage relative to maximum frequency.
+  private final MovingAverage totalCpuUsage;
+  // CPU frequency in percentage from maximum.
+  private final MovingAverage frequencyScale;
+
+  // Periodic sampler; non-null only while monitoring is running (see
+  // scheduleCpuUtilizationTask()/pause()).
+  @Nullable
+  private ScheduledExecutorService executor;
+  private long lastStatLogTimeMs;
+  // Per-core max frequency in Hz; 0 means "not yet determined" (core offline).
+  private long[] cpuFreqMax;
+  private int cpusPresent;
+  // Number of cores that reported a current frequency in the last sample.
+  private int actualCpusPresent;
+  private boolean initialized;
+  private boolean cpuOveruse;
+  // Sysfs paths for per-core max/current frequency; maxPath[i] is nulled once
+  // the max frequency has been read.
+  private String[] maxPath;
+  private String[] curPath;
+  private double[] curFreqScales;
+  @Nullable
+  private ProcStat lastProcStat;
+
+  // Immutable snapshot of cumulative jiffy counters read from /proc/stat
+  // (see readProcStat()); deltas between snapshots give the CPU load.
+  private static class ProcStat {
+    final long userTime;
+    final long systemTime;
+    final long idleTime;
+
+    ProcStat(long userTime, long systemTime, long idleTime) {
+      this.userTime = userTime;
+      this.systemTime = systemTime;
+      this.idleTime = idleTime;
+    }
+  }
+
+  /**
+   * Fixed-size moving average over the most recent samples, backed by a ring
+   * buffer with an O(1) running sum.
+   */
+  private static class MovingAverage {
+    private final int windowSize;
+    private double runningSum;
+    private double latestValue;
+    private double[] window;
+    private int nextSlot;
+
+    public MovingAverage(int size) {
+      if (size <= 0) {
+        throw new AssertionError("Size value in MovingAverage ctor should be positive.");
+      }
+      this.windowSize = size;
+      window = new double[size];
+    }
+
+    public void reset() {
+      Arrays.fill(window, 0);
+      nextSlot = 0;
+      runningSum = 0;
+      latestValue = 0;
+    }
+
+    public void addValue(double value) {
+      // Evict the oldest sample while keeping the running sum in step, so
+      // getAverage() never has to rescan the buffer.
+      runningSum += value - window[nextSlot];
+      window[nextSlot] = value;
+      latestValue = value;
+      nextSlot = (nextSlot + 1) % windowSize;
+    }
+
+    public double getCurrent() {
+      return latestValue;
+    }
+
+    public double getAverage() {
+      // Zero-filled slots count toward the average until the window has seen
+      // windowSize samples, so the average ramps up from 0 after reset().
+      return runningSum / (double) windowSize;
+    }
+  }
+
+  /**
+   * Whether CPU monitoring works on this OS version. The monitor reads
+   * /proc/stat and per-core sysfs files below; presumably those reads are
+   * restricted on Android N and later, hence the version gate — TODO confirm.
+   */
+  public static boolean isSupported() {
+    return Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
+  }
+
+  // Starts periodic sampling immediately; callers should check isSupported()
+  // first, otherwise this throws.
+  public CpuMonitor(Context context) {
+    if (!isSupported()) {
+      throw new RuntimeException("CpuMonitor is not supported on this Android version.");
+    }
+
+    Log.d(TAG, "CpuMonitor ctor.");
+    appContext = context.getApplicationContext();
+    userCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+    systemCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+    totalCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+    frequencyScale = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+    lastStatLogTimeMs = SystemClock.elapsedRealtime();
+
+    scheduleCpuUtilizationTask();
+  }
+
+  // Stops sampling; safe to call repeatedly (no-op when already paused).
+  public void pause() {
+    if (executor != null) {
+      Log.d(TAG, "pause");
+      executor.shutdownNow();
+      executor = null;
+    }
+  }
+
+  // Clears accumulated statistics and restarts periodic sampling.
+  public void resume() {
+    Log.d(TAG, "resume");
+    resetStat();
+    scheduleCpuUtilizationTask();
+  }
+
+  // Clears statistics and the overuse flag, but only while sampling is active.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized void reset() {
+    if (executor != null) {
+      Log.d(TAG, "reset");
+      resetStat();
+      cpuOveruse = false;
+    }
+  }
+
+  // Latest user+system CPU usage as an integer percentage.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized int getCpuUsageCurrent() {
+    return doubleToPercent(userCpuUsage.getCurrent() + systemCpuUsage.getCurrent());
+  }
+
+  // Windowed average of user+system CPU usage as an integer percentage.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized int getCpuUsageAverage() {
+    return doubleToPercent(userCpuUsage.getAverage() + systemCpuUsage.getAverage());
+  }
+
+  // Windowed average of current/max CPU frequency as an integer percentage.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized int getFrequencyScaleAverage() {
+    return doubleToPercent(frequencyScale.getAverage());
+  }
+
+  // (Re)starts the periodic sampler, tearing down any previous executor first
+  // so at most one sampling task is ever live.
+  private void scheduleCpuUtilizationTask() {
+    if (executor != null) {
+      executor.shutdownNow();
+      executor = null;
+    }
+
+    executor = Executors.newSingleThreadScheduledExecutor();
+    Runnable sampler = new Runnable() {
+      @Override
+      public void run() {
+        cpuUtilizationTask();
+      }
+    };
+    @SuppressWarnings("unused") // Prevent downstream linter warnings.
+    Future<?> possiblyIgnoredError =
+        executor.scheduleAtFixedRate(sampler, 0, CPU_STAT_SAMPLE_PERIOD_MS, TimeUnit.MILLISECONDS);
+  }
+
+  // One sampler tick: take a CPU measurement, and log the aggregated stats at
+  // the lower CPU_STAT_LOG_PERIOD_MS cadence.
+  private void cpuUtilizationTask() {
+    if (!sampleCpuUtilization()) {
+      return;
+    }
+    if (SystemClock.elapsedRealtime() - lastStatLogTimeMs >= CPU_STAT_LOG_PERIOD_MS) {
+      lastStatLogTimeMs = SystemClock.elapsedRealtime();
+      Log.d(TAG, getStatString());
+    }
+  }
+
+  // Discovers the core count and builds per-core path/frequency bookkeeping.
+  // On any read/parse failure cpusPresent stays 0 and sampleCpuUtilization()
+  // reports failure.
+  private void init() {
+    // The sysfs "present" file lists the core range as e.g. "0-3"; we read
+    // both endpoints and derive the count. The explicit scanner.close() the
+    // original had is gone: try-with-resources already closes the Scanner
+    // (and, through it, the wrapped streams), so closing inside the body was
+    // a redundant double close.
+    try (FileInputStream fin = new FileInputStream("/sys/devices/system/cpu/present");
+        InputStreamReader streamReader = new InputStreamReader(fin, Charset.forName("UTF-8"));
+        BufferedReader reader = new BufferedReader(streamReader);
+        Scanner scanner = new Scanner(reader).useDelimiter("[-\n]")) {
+      scanner.nextInt(); // Skip leading number 0.
+      cpusPresent = 1 + scanner.nextInt();
+    } catch (FileNotFoundException e) {
+      Log.e(TAG, "Cannot do CPU stats since /sys/devices/system/cpu/present is missing");
+    } catch (IOException e) {
+      Log.e(TAG, "Error closing file");
+    } catch (Exception e) {
+      Log.e(TAG, "Cannot do CPU stats due to /sys/devices/system/cpu/present parsing problem");
+    }
+
+    // Max frequencies start at 0 ("not yet determined") and are filled in
+    // lazily by sampleCpuUtilization(), since offline cores cannot be queried.
+    cpuFreqMax = new long[cpusPresent];
+    maxPath = new String[cpusPresent];
+    curPath = new String[cpusPresent];
+    curFreqScales = new double[cpusPresent];
+    for (int i = 0; i < cpusPresent; i++) {
+      cpuFreqMax[i] = 0; // Frequency "not yet determined".
+      curFreqScales[i] = 0;
+      maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
+      curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
+    }
+
+    lastProcStat = new ProcStat(0, 0, 0);
+    resetStat();
+
+    initialized = true;
+  }
+
+  // Zeroes all moving averages and restarts the log-throttling clock.
+  private synchronized void resetStat() {
+    userCpuUsage.reset();
+    systemCpuUsage.reset();
+    totalCpuUsage.reset();
+    frequencyScale.reset();
+    lastStatLogTimeMs = SystemClock.elapsedRealtime();
+  }
+
+ private int getBatteryLevel() {
+ // Use sticky broadcast with null receiver to read battery level once only.
+ Intent intent = appContext.registerReceiver(
+ null /* receiver */, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));
+
+ int batteryLevel = 0;
+ int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
+ if (batteryScale > 0) {
+ batteryLevel =
+ (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
+ }
+ return batteryLevel;
+ }
+
+  /**
+   * Re-measure CPU use. Call this method at an interval of around 1/s.
+   * Returns true on success. On success the userCpuUsage, systemCpuUsage,
+   * totalCpuUsage and frequencyScale moving averages are updated with the
+   * load observed since the previous call, using deltas of the /proc/stat
+   * jiffy counters weighted by the current/max frequency ratio.
+   */
+  private synchronized boolean sampleCpuUtilization() {
+    long lastSeenMaxFreq = 0;
+    long cpuFreqCurSum = 0;
+    long cpuFreqMaxSum = 0;
+
+    if (!initialized) {
+      init();
+    }
+    if (cpusPresent == 0) {
+      return false;
+    }
+
+    actualCpusPresent = 0;
+    for (int i = 0; i < cpusPresent; i++) {
+      /*
+       * For each CPU, attempt to first read its max frequency, then its
+       * current frequency. Once as the max frequency for a CPU is found,
+       * save it in cpuFreqMax[].
+       */
+
+      curFreqScales[i] = 0;
+      if (cpuFreqMax[i] == 0) {
+        // We have never found this CPU's max frequency. Attempt to read it.
+        long cpufreqMax = readFreqFromFile(maxPath[i]);
+        if (cpufreqMax > 0) {
+          Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
+          lastSeenMaxFreq = cpufreqMax;
+          cpuFreqMax[i] = cpufreqMax;
+          maxPath[i] = null; // Kill path to free its memory.
+        }
+      } else {
+        lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
+      }
+
+      long cpuFreqCur = readFreqFromFile(curPath[i]);
+      if (cpuFreqCur == 0 && lastSeenMaxFreq == 0) {
+        // No current frequency information for this CPU core - ignore it.
+        continue;
+      }
+      if (cpuFreqCur > 0) {
+        actualCpusPresent++;
+      }
+      cpuFreqCurSum += cpuFreqCur;
+
+      /* Here, lastSeenMaxFreq might come from
+       * 1. this core's cached cpuFreqMax[i], or
+       * 2. a lower-numbered core's value carried over from a previous
+       *    iteration (offline cores inherit the closest lower core's max), or
+       * 3. the value just read from maxPath[i] above.
+       */
+      cpuFreqMaxSum += lastSeenMaxFreq;
+      if (lastSeenMaxFreq > 0) {
+        curFreqScales[i] = (double) cpuFreqCur / lastSeenMaxFreq;
+      }
+    }
+
+    if (cpuFreqCurSum == 0 || cpuFreqMaxSum == 0) {
+      Log.e(TAG, "Could not read max or current frequency for any CPU");
+      return false;
+    }
+
+    /*
+     * Since the cycle counts are for the period between the last invocation
+     * and this present one, we average the percentual CPU frequencies between
+     * now and the beginning of the measurement period. This is significantly
+     * incorrect only if the frequencies have peeked or dropped in between the
+     * invocations.
+     */
+    double currentFrequencyScale = cpuFreqCurSum / (double) cpuFreqMaxSum;
+    if (frequencyScale.getCurrent() > 0) {
+      currentFrequencyScale = (frequencyScale.getCurrent() + currentFrequencyScale) * 0.5;
+    }
+
+    ProcStat procStat = readProcStat();
+    if (procStat == null) {
+      return false;
+    }
+
+    // Jiffy deltas since the previous sample; their sum is the wall-clock
+    // denominator for the usage fractions below.
+    long diffUserTime = procStat.userTime - lastProcStat.userTime;
+    long diffSystemTime = procStat.systemTime - lastProcStat.systemTime;
+    long diffIdleTime = procStat.idleTime - lastProcStat.idleTime;
+    long allTime = diffUserTime + diffSystemTime + diffIdleTime;
+
+    if (currentFrequencyScale == 0 || allTime == 0) {
+      return false;
+    }
+
+    // Update statistics.
+    frequencyScale.addValue(currentFrequencyScale);
+
+    double currentUserCpuUsage = diffUserTime / (double) allTime;
+    userCpuUsage.addValue(currentUserCpuUsage);
+
+    double currentSystemCpuUsage = diffSystemTime / (double) allTime;
+    systemCpuUsage.addValue(currentSystemCpuUsage);
+
+    double currentTotalCpuUsage =
+        (currentUserCpuUsage + currentSystemCpuUsage) * currentFrequencyScale;
+    totalCpuUsage.addValue(currentTotalCpuUsage);
+
+    // Save new measurements for next round's deltas.
+    lastProcStat = procStat;
+
+    return true;
+  }
+
+  // Scales a 0..1 fraction to an integer percentage, rounded to nearest.
+  private int doubleToPercent(double fraction) {
+    return (int) (fraction * 100 + 0.5);
+  }
+
+  // Builds the one-line stats summary that cpuUtilizationTask() logs; each
+  // metric is rendered as "current/average" in integer percent.
+  private synchronized String getStatString() {
+    StringBuilder stat = new StringBuilder();
+    stat.append("CPU User: ");
+    stat.append(doubleToPercent(userCpuUsage.getCurrent()));
+    stat.append("/");
+    stat.append(doubleToPercent(userCpuUsage.getAverage()));
+    stat.append(". System: ");
+    stat.append(doubleToPercent(systemCpuUsage.getCurrent()));
+    stat.append("/");
+    stat.append(doubleToPercent(systemCpuUsage.getAverage()));
+    stat.append(". Freq: ");
+    stat.append(doubleToPercent(frequencyScale.getCurrent()));
+    stat.append("/");
+    stat.append(doubleToPercent(frequencyScale.getAverage()));
+    stat.append(". Total usage: ");
+    stat.append(doubleToPercent(totalCpuUsage.getCurrent()));
+    stat.append("/");
+    stat.append(doubleToPercent(totalCpuUsage.getAverage()));
+    stat.append(". Cores: ");
+    stat.append(actualCpusPresent);
+    // Per-core current/max frequency ratios, in percent.
+    stat.append("( ");
+    for (int core = 0; core < cpusPresent; core++) {
+      stat.append(doubleToPercent(curFreqScales[core]));
+      stat.append(" ");
+    }
+    stat.append("). Battery: ");
+    stat.append(getBatteryLevel());
+    if (cpuOveruse) {
+      stat.append(". Overuse.");
+    }
+    return stat.toString();
+  }
+
+  /**
+   * Reads a single long from `fileName`, returning 0 when the file is
+   * missing, empty, or unparsable. Offline cores legitimately lack their
+   * cpufreq sysfs nodes, so these failures are expected and not logged.
+   */
+  private long readFreqFromFile(String fileName) {
+    try (FileInputStream stream = new FileInputStream(fileName);
+        InputStreamReader streamReader = new InputStreamReader(stream, Charset.forName("UTF-8"));
+        BufferedReader reader = new BufferedReader(streamReader)) {
+      return parseLong(reader.readLine());
+    } catch (FileNotFoundException e) {
+      // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
+      // is not present. This is not an error.
+    } catch (IOException e) {
+      // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
+      // is empty. This is not an error.
+    }
+    return 0;
+  }
+
+  // Lenient Long.parseLong: logs and returns 0 on malformed (or null) input
+  // instead of propagating NumberFormatException.
+  private static long parseLong(String value) {
+    try {
+      return Long.parseLong(value);
+    } catch (NumberFormatException e) {
+      Log.e(TAG, "parseLong error.", e);
+      return 0;
+    }
+  }
+
+  /*
+   * Read the current utilization of all CPUs using the cumulative first line
+   * of /proc/stat. Returns null when the file cannot be opened or parsed;
+   * individual malformed fields fall back to 0 via parseLong().
+   */
+  @SuppressWarnings("StringSplitter")
+  private @Nullable ProcStat readProcStat() {
+    long userTime = 0;
+    long systemTime = 0;
+    long idleTime = 0;
+    try (FileInputStream stream = new FileInputStream("/proc/stat");
+        InputStreamReader streamReader = new InputStreamReader(stream, Charset.forName("UTF-8"));
+        BufferedReader reader = new BufferedReader(streamReader)) {
+      // line should contain something like this:
+      // cpu  5093818 271838 3512830 165934119 101374 447076 272086 0 0 0
+      //       user    nice  system     idle   iowait  irq   softirq
+      String line = reader.readLine();
+      String[] lines = line.split("\\s+");
+      int length = lines.length;
+      if (length >= 5) {
+        userTime = parseLong(lines[1]); // user
+        userTime += parseLong(lines[2]); // nice
+        systemTime = parseLong(lines[3]); // system
+        idleTime = parseLong(lines[4]); // idle
+      }
+      if (length >= 8) {
+        // iowait is counted as user time; irq/softirq as system time.
+        userTime += parseLong(lines[5]); // iowait
+        systemTime += parseLong(lines[6]); // irq
+        systemTime += parseLong(lines[7]); // softirq
+      }
+    } catch (FileNotFoundException e) {
+      Log.e(TAG, "Cannot open /proc/stat for reading", e);
+      return null;
+    } catch (Exception e) {
+      Log.e(TAG, "Problems parsing /proc/stat", e);
+      return null;
+    }
+    return new ProcStat(userTime, systemTime, idleTime);
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
new file mode 100644
index 0000000000..1b113e1398
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.SessionDescription;
+
+/**
+ * Implementation of AppRTCClient that uses direct TCP connection as the signaling channel.
+ * This eliminates the need for an external server. This class does not support loopback
+ * connections.
+ */
+public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChannelEvents {
+  private static final String TAG = "DirectRTCClient";
+  // Port used when the room id contains no explicit ":port" suffix.
+  private static final int DEFAULT_PORT = 8888;
+
+  // Regex pattern used for checking if room id looks like an IP.
+  // Group 1 captures the host part; the last group captures the optional port.
+  static final Pattern IP_PATTERN = Pattern.compile("("
+      // IPv4
+      + "((\\d+\\.){3}\\d+)|"
+      // IPv6
+      + "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
+      + "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
+      + "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
+      // IPv6 without []
+      + "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
+      + "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
+      // Literals
+      + "localhost"
+      + ")"
+      // Optional port number
+      + "(:(\\d+))?");
+
+  // Single-threaded executor that serializes all signaling work.
+  private final ExecutorService executor;
+  private final SignalingEvents events;
+  // Non-null between connectToRoomInternal() and disconnectFromRoomInternal().
+  @Nullable
+  private TCPChannelClient tcpClient;
+  private RoomConnectionParameters connectionParameters;
+
+  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+  // All alterations of the room state should be done from inside the looper thread.
+  private ConnectionState roomState;
+
+  public DirectRTCClient(SignalingEvents events) {
+    this.events = events;
+
+    executor = Executors.newSingleThreadExecutor();
+    roomState = ConnectionState.NEW;
+  }
+
+  /**
+   * Connects to the room, roomId in connectionsParameters is required. roomId must be a valid
+   * IP address matching IP_PATTERN. Loopback is unsupported: it is reported as
+   * a channel error and no connection is attempted.
+   */
+  @Override
+  public void connectToRoom(RoomConnectionParameters connectionParameters) {
+    this.connectionParameters = connectionParameters;
+
+    if (connectionParameters.loopback) {
+      reportError("Loopback connections aren't supported by DirectRTCClient.");
+      // Bail out here. Previously the code fell through and still queued
+      // connectToRoomInternal(), which resets roomState to NEW (clobbering the
+      // ERROR state set by reportError) and attempts the connection anyway.
+      return;
+    }
+
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        connectToRoomInternal();
+      }
+    });
+  }
+
+  @Override
+  public void disconnectFromRoom() {
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        disconnectFromRoomInternal();
+      }
+    });
+  }
+
+  /**
+   * Connects to the room.
+   *
+   * Runs on the looper thread. Parses the room id as "host[:port]" via
+   * IP_PATTERN and opens a TCP channel to it; reports an error and returns
+   * early when the id or port is malformed.
+   */
+  private void connectToRoomInternal() {
+    this.roomState = ConnectionState.NEW;
+
+    String endpoint = connectionParameters.roomId;
+
+    Matcher matcher = IP_PATTERN.matcher(endpoint);
+    if (!matcher.matches()) {
+      reportError("roomId must match IP_PATTERN for DirectRTCClient.");
+      return;
+    }
+
+    // Group 1 is the host; the final group is the optional port.
+    String ip = matcher.group(1);
+    String portStr = matcher.group(matcher.groupCount());
+    int port;
+
+    if (portStr != null) {
+      try {
+        port = Integer.parseInt(portStr);
+      } catch (NumberFormatException e) {
+        reportError("Invalid port number: " + portStr);
+        return;
+      }
+    } else {
+      port = DEFAULT_PORT;
+    }
+
+    tcpClient = new TCPChannelClient(executor, this, ip, port);
+  }
+
+  /**
+   * Disconnects from the room.
+   *
+   * Runs on the looper thread. Also shuts down the executor, so this client
+   * instance cannot be reused after disconnecting.
+   */
+  private void disconnectFromRoomInternal() {
+    roomState = ConnectionState.CLOSED;
+
+    if (tcpClient != null) {
+      tcpClient.disconnect();
+      tcpClient = null;
+    }
+    executor.shutdown();
+  }
+
+  // Sends the local offer SDP as {"type": "offer", "sdp": ...}; errors unless
+  // the TCP channel is already CONNECTED.
+  @Override
+  public void sendOfferSdp(final SessionDescription sdp) {
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        if (roomState != ConnectionState.CONNECTED) {
+          reportError("Sending offer SDP in non connected state.");
+          return;
+        }
+        JSONObject json = new JSONObject();
+        jsonPut(json, "sdp", sdp.description);
+        jsonPut(json, "type", "offer");
+        sendMessage(json.toString());
+      }
+    });
+  }
+
+  // Sends the local answer SDP as {"type": "answer", "sdp": ...}.
+  // NOTE(review): unlike sendOfferSdp/sendLocalIceCandidate, this has no
+  // CONNECTED state check — confirm whether that asymmetry is intentional
+  // (an answer can only follow a received offer, which implies CONNECTED).
+  @Override
+  public void sendAnswerSdp(final SessionDescription sdp) {
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        JSONObject json = new JSONObject();
+        jsonPut(json, "sdp", sdp.description);
+        jsonPut(json, "type", "answer");
+        sendMessage(json.toString());
+      }
+    });
+  }
+
+  // Sends one local ICE candidate; errors unless the channel is CONNECTED.
+  @Override
+  public void sendLocalIceCandidate(final IceCandidate candidate) {
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        JSONObject json = new JSONObject();
+        jsonPut(json, "type", "candidate");
+        jsonPut(json, "label", candidate.sdpMLineIndex);
+        jsonPut(json, "id", candidate.sdpMid);
+        jsonPut(json, "candidate", candidate.sdp);
+
+        if (roomState != ConnectionState.CONNECTED) {
+          reportError("Sending ICE candidate in non connected state.");
+          return;
+        }
+        sendMessage(json.toString());
+      }
+    });
+  }
+
+  /** Send removed Ice candidates to the other participant. */
+  @Override
+  public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        JSONObject json = new JSONObject();
+        jsonPut(json, "type", "remove-candidates");
+        JSONArray jsonArray = new JSONArray();
+        for (final IceCandidate candidate : candidates) {
+          jsonArray.put(toJsonCandidate(candidate));
+        }
+        jsonPut(json, "candidates", jsonArray);
+
+        if (roomState != ConnectionState.CONNECTED) {
+          reportError("Sending ICE candidate removals in non connected state.");
+          return;
+        }
+        sendMessage(json.toString());
+      }
+    });
+  }
+
+ // -------------------------------------------------------------------
+ // TCPChannelClient event handlers
+
+  /**
+   * If the client is the server side, this will trigger onConnectedToRoom.
+   * (The client side instead waits for an "offer" message in onTCPMessage.)
+   */
+  @Override
+  public void onTCPConnected(boolean isServer) {
+    if (isServer) {
+      roomState = ConnectionState.CONNECTED;
+
+      SignalingParameters parameters = new SignalingParameters(
+          // Ice servers are not needed for direct connections.
+          new ArrayList<>(),
+          isServer, // Server side acts as the initiator on direct connections.
+          null, // clientId
+          null, // wssUrl
+          null, // wssPostUrl
+          null, // offerSdp
+          null // iceCandidates
+      );
+      events.onConnectedToRoom(parameters);
+    }
+  }
+
+  // Dispatches an incoming signaling message by its "type" field:
+  // candidate / remove-candidates / answer / offer. An "offer" also moves the
+  // (client-side) room state to CONNECTED.
+  @Override
+  public void onTCPMessage(String msg) {
+    try {
+      JSONObject json = new JSONObject(msg);
+      String type = json.optString("type");
+      if (type.equals("candidate")) {
+        events.onRemoteIceCandidate(toJavaCandidate(json));
+      } else if (type.equals("remove-candidates")) {
+        JSONArray candidateArray = json.getJSONArray("candidates");
+        IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
+        for (int i = 0; i < candidateArray.length(); ++i) {
+          candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
+        }
+        events.onRemoteIceCandidatesRemoved(candidates);
+      } else if (type.equals("answer")) {
+        SessionDescription sdp = new SessionDescription(
+            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+        events.onRemoteDescription(sdp);
+      } else if (type.equals("offer")) {
+        SessionDescription sdp = new SessionDescription(
+            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+
+        SignalingParameters parameters = new SignalingParameters(
+            // Ice servers are not needed for direct connections.
+            new ArrayList<>(),
+            false, // This code will only be run on the client side. So, we are not the initiator.
+            null, // clientId
+            null, // wssUrl
+            null, // wssPostUrl
+            sdp, // offerSdp
+            null // iceCandidates
+        );
+        roomState = ConnectionState.CONNECTED;
+        events.onConnectedToRoom(parameters);
+      } else {
+        reportError("Unexpected TCP message: " + msg);
+      }
+    } catch (JSONException e) {
+      reportError("TCP message JSON parsing error: " + e.toString());
+    }
+  }
+
+  @Override
+  public void onTCPError(String description) {
+    reportError("TCP connection error: " + description);
+  }
+
+  @Override
+  public void onTCPClose() {
+    events.onChannelClose();
+  }
+
+ // --------------------------------------------------------------------
+ // Helper functions.
+  // Logs the error immediately, then (on the signaling thread) transitions to
+  // ERROR and notifies listeners — but only once; subsequent errors while
+  // already in the ERROR state are logged but not re-reported.
+  private void reportError(final String errorMessage) {
+    Log.e(TAG, errorMessage);
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        if (roomState != ConnectionState.ERROR) {
+          roomState = ConnectionState.ERROR;
+          events.onChannelError(errorMessage);
+        }
+      }
+    });
+  }
+
+  // Queues `message` for delivery on the signaling thread. Messages queued
+  // after the channel has been torn down are dropped with a warning.
+  private void sendMessage(final String message) {
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
+        if (tcpClient == null) {
+          // disconnectFromRoomInternal() nulls tcpClient on this same
+          // executor; a send queued after that would previously NPE here.
+          Log.w(TAG, "Dropping message, TCP channel is closed: " + message);
+          return;
+        }
+        tcpClient.send(message);
+      }
+    });
+  }
+
+  // Put a `key`->`value` mapping in `json`. JSONException here means a
+  // programming error (e.g. NaN value), so it is rethrown unchecked.
+  private static void jsonPut(JSONObject json, String key, Object value) {
+    try {
+      json.put(key, value);
+    } catch (JSONException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  // Converts a Java candidate to a JSONObject with "label"/"id"/"candidate"
+  // keys (the inverse of toJavaCandidate below).
+  private static JSONObject toJsonCandidate(final IceCandidate candidate) {
+    JSONObject json = new JSONObject();
+    jsonPut(json, "label", candidate.sdpMLineIndex);
+    jsonPut(json, "id", candidate.sdpMid);
+    jsonPut(json, "candidate", candidate.sdp);
+    return json;
+  }
+
+  // Converts a JSON candidate to a Java object.
+  private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
+    return new IceCandidate(
+        json.getString("id"), json.getInt("label"), json.getString("candidate"));
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
new file mode 100644
index 0000000000..94ca05549a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.TextView;
+import org.webrtc.RTCStats;
+import org.webrtc.RTCStatsReport;
+
+/**
+ * Fragment for HUD statistics display.
+ */
+public class HudFragment extends Fragment {
+  private TextView statView;
+  private ImageButton toggleDebugButton;
+  // Whether the HUD was requested via EXTRA_DISPLAY_HUD; read in onStart().
+  private boolean displayHud;
+  // volatile: updateEncoderStatistics() may be invoked off the UI thread.
+  private volatile boolean isRunning;
+  private CpuMonitor cpuMonitor;
+
+  @Override
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+    View controlView = inflater.inflate(R.layout.fragment_hud, container, false);
+
+    // Create UI controls.
+    statView = controlView.findViewById(R.id.hud_stat_call);
+    toggleDebugButton = controlView.findViewById(R.id.button_toggle_debug);
+
+    // The debug button toggles visibility of the stats text, but only when
+    // the HUD was enabled via the launch extras.
+    toggleDebugButton.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        if (displayHud) {
+          statView.setVisibility(
+              statView.getVisibility() == View.VISIBLE ? View.INVISIBLE : View.VISIBLE);
+        }
+      }
+    });
+
+    return controlView;
+  }
+
+  @Override
+  public void onStart() {
+    super.onStart();
+
+    Bundle args = getArguments();
+    if (args != null) {
+      displayHud = args.getBoolean(CallActivity.EXTRA_DISPLAY_HUD, false);
+    }
+    // Stats start hidden even when the HUD is enabled; the toggle button
+    // reveals them on demand.
+    int visibility = displayHud ? View.VISIBLE : View.INVISIBLE;
+    statView.setVisibility(View.INVISIBLE);
+    toggleDebugButton.setVisibility(visibility);
+    isRunning = true;
+  }
+
+  @Override
+  public void onStop() {
+    isRunning = false;
+    super.onStop();
+  }
+
+  // Optional: when set, CPU usage/frequency is prepended to the stats text.
+  public void setCpuMonitor(CpuMonitor cpuMonitor) {
+    this.cpuMonitor = cpuMonitor;
+  }
+
+  // Renders the latest RTC stats (and, when available, CPU stats) into the
+  // HUD text view. No-op while the fragment is stopped or the HUD disabled.
+  public void updateEncoderStatistics(final RTCStatsReport report) {
+    if (!isRunning || !displayHud) {
+      return;
+    }
+
+    final StringBuilder stats = new StringBuilder();
+
+    if (cpuMonitor != null) {
+      stats.append("CPU%: ");
+      stats.append(cpuMonitor.getCpuUsageCurrent());
+      stats.append("/");
+      stats.append(cpuMonitor.getCpuUsageAverage());
+      stats.append(". Freq: ");
+      stats.append(cpuMonitor.getFrequencyScaleAverage());
+      stats.append("\n");
+    }
+
+    for (RTCStats stat : report.getStatsMap().values()) {
+      stats.append(stat.toString()).append("\n");
+    }
+
+    statView.setText(stats.toString());
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
new file mode 100644
index 0000000000..398b0c3b5e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -0,0 +1,1400 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.Context;
+import android.os.Environment;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.RecordedAudioToFileController;
+import org.webrtc.AddIceObserver;
+import org.webrtc.AudioSource;
+import org.webrtc.AudioTrack;
+import org.webrtc.CameraVideoCapturer;
+import org.webrtc.CandidatePairChangeEvent;
+import org.webrtc.DataChannel;
+import org.webrtc.DefaultVideoDecoderFactory;
+import org.webrtc.DefaultVideoEncoderFactory;
+import org.webrtc.EglBase;
+import org.webrtc.IceCandidate;
+import org.webrtc.IceCandidateErrorEvent;
+import org.webrtc.Logging;
+import org.webrtc.MediaConstraints;
+import org.webrtc.MediaStream;
+import org.webrtc.MediaStreamTrack;
+import org.webrtc.PeerConnection;
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnection.PeerConnectionState;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RTCStatsCollectorCallback;
+import org.webrtc.RTCStatsReport;
+import org.webrtc.RtpParameters;
+import org.webrtc.RtpReceiver;
+import org.webrtc.RtpSender;
+import org.webrtc.RtpTransceiver;
+import org.webrtc.SdpObserver;
+import org.webrtc.SessionDescription;
+import org.webrtc.SoftwareVideoDecoderFactory;
+import org.webrtc.SoftwareVideoEncoderFactory;
+import org.webrtc.SurfaceTextureHelper;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoDecoderFactory;
+import org.webrtc.VideoEncoderFactory;
+import org.webrtc.VideoSink;
+import org.webrtc.VideoSource;
+import org.webrtc.VideoTrack;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
+
+/**
+ * Peer connection client implementation.
+ *
+ * <p>All public methods are routed to local looper thread.
+ * All PeerConnectionEvents callbacks are invoked from the same looper thread.
+ * This class is a singleton.
+ */
+public class PeerConnectionClient {
+  // Labels for the locally created media tracks/stream.
+  public static final String VIDEO_TRACK_ID = "ARDAMSv0";
+  public static final String AUDIO_TRACK_ID = "ARDAMSa0";
+  public static final String VIDEO_TRACK_TYPE = "video";
+  private static final String TAG = "PCRTCClient";
+  // Video/audio codec names as they appear in SDP.
+  private static final String VIDEO_CODEC_VP8 = "VP8";
+  private static final String VIDEO_CODEC_VP9 = "VP9";
+  private static final String VIDEO_CODEC_H264 = "H264";
+  private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
+  private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
+  private static final String VIDEO_CODEC_AV1 = "AV1";
+  private static final String AUDIO_CODEC_OPUS = "opus";
+  private static final String AUDIO_CODEC_ISAC = "ISAC";
+  private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
+  private static final String VIDEO_FLEXFEC_FIELDTRIAL =
+      "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
+  // NOTE(review): the constant name mentions AGC but the value enables the
+  // MinimizeResamplingOnMobile field trial - confirm intent upstream.
+  private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
+      "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
+  private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
+  // MediaConstraints keys for toggling audio processing.
+  private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
+  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
+  private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
+  private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
+  private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
+  // Default capture format when none is specified (see createMediaConstraintsInternal).
+  private static final int HD_VIDEO_WIDTH = 1280;
+  private static final int HD_VIDEO_HEIGHT = 720;
+  private static final int BPS_IN_KBPS = 1000;
+  private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log";
+
+  // Executor thread is started once in private ctor and is used for all
+  // peer connection API calls to ensure new peer connection factory is
+  // created on the same thread as previously destroyed factory.
+  private static final ExecutorService executor = Executors.newSingleThreadExecutor();
+
+  private final PCObserver pcObserver = new PCObserver();
+  private final SDPObserver sdpObserver = new SDPObserver();
+  private final Timer statsTimer = new Timer();
+  private final EglBase rootEglBase;
+  private final Context appContext;
+  private final PeerConnectionParameters peerConnectionParameters;
+  private final PeerConnectionEvents events;
+
+  // Mutable state below is only touched on the executor thread (see class javadoc).
+  @Nullable
+  private PeerConnectionFactory factory;
+  @Nullable
+  private PeerConnection peerConnection;
+  @Nullable
+  private AudioSource audioSource;
+  @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+  @Nullable private VideoSource videoSource;
+  private boolean preferIsac;
+  private boolean videoCapturerStopped;
+  private boolean isError;
+  @Nullable
+  private VideoSink localRender;
+  @Nullable private List<VideoSink> remoteSinks;
+  private SignalingParameters signalingParameters;
+  private int videoWidth;
+  private int videoHeight;
+  private int videoFps;
+  private MediaConstraints audioConstraints;
+  private MediaConstraints sdpMediaConstraints;
+  // Queued remote ICE candidates are consumed only after both local and
+  // remote descriptions are set. Similarly local ICE candidates are sent to
+  // remote peer after both local and remote description are set.
+  @Nullable
+  private List<IceCandidate> queuedRemoteCandidates;
+  private boolean isInitiator;
+  @Nullable private SessionDescription localDescription; // either offer or answer description
+  @Nullable
+  private VideoCapturer videoCapturer;
+  // enableVideo is set to true if video should be rendered and sent.
+  private boolean renderVideo = true;
+  @Nullable
+  private VideoTrack localVideoTrack;
+  @Nullable
+  private VideoTrack remoteVideoTrack;
+  @Nullable
+  private RtpSender localVideoSender;
+  // enableAudio is set to true if audio should be sent.
+  private boolean enableAudio = true;
+  @Nullable
+  private AudioTrack localAudioTrack;
+  @Nullable
+  private DataChannel dataChannel;
+  private final boolean dataChannelEnabled;
+  // Enable RtcEventLog.
+  @Nullable
+  private RtcEventLog rtcEventLog;
+  // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
+  // recorded audio samples to an output file.
+  @Nullable private RecordedAudioToFileController saveRecordedAudioToFile;
+
+  /**
+   * Data channel parameters (mirrors the fields of org.webrtc.DataChannel.Init,
+   * see createPeerConnectionInternal).
+   */
+  public static class DataChannelParameters {
+    public final boolean ordered;
+    public final int maxRetransmitTimeMs;
+    public final int maxRetransmits;
+    public final String protocol;
+    public final boolean negotiated;
+    public final int id;
+
+    public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+        String protocol, boolean negotiated, int id) {
+      this.ordered = ordered;
+      this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+      this.maxRetransmits = maxRetransmits;
+      this.protocol = protocol;
+      this.negotiated = negotiated;
+      this.id = id;
+    }
+  }
+
+  /**
+   * Peer connection parameters. Immutable value holder configuring media,
+   * codecs, audio processing and debugging options for a call.
+   */
+  public static class PeerConnectionParameters {
+    public final boolean videoCallEnabled;
+    public final boolean loopback;
+    public final boolean tracing;
+    public final int videoWidth;
+    public final int videoHeight;
+    public final int videoFps;
+    public final int videoMaxBitrate;
+    public final String videoCodec;
+    public final boolean videoCodecHwAcceleration;
+    public final boolean videoFlexfecEnabled;
+    public final int audioStartBitrate;
+    public final String audioCodec;
+    public final boolean noAudioProcessing;
+    public final boolean aecDump;
+    public final boolean saveInputAudioToFile;
+    public final boolean useOpenSLES;
+    public final boolean disableBuiltInAEC;
+    public final boolean disableBuiltInAGC;
+    public final boolean disableBuiltInNS;
+    public final boolean disableWebRtcAGCAndHPF;
+    public final boolean enableRtcEventLog;
+    // Null disables the data channel (checked in the PeerConnectionClient ctor).
+    private final DataChannelParameters dataChannelParameters;
+
+    public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+        int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
+        boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
+        String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile,
+        boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
+        boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog,
+        DataChannelParameters dataChannelParameters) {
+      this.videoCallEnabled = videoCallEnabled;
+      this.loopback = loopback;
+      this.tracing = tracing;
+      this.videoWidth = videoWidth;
+      this.videoHeight = videoHeight;
+      this.videoFps = videoFps;
+      this.videoMaxBitrate = videoMaxBitrate;
+      this.videoCodec = videoCodec;
+      this.videoFlexfecEnabled = videoFlexfecEnabled;
+      this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+      this.audioStartBitrate = audioStartBitrate;
+      this.audioCodec = audioCodec;
+      this.noAudioProcessing = noAudioProcessing;
+      this.aecDump = aecDump;
+      this.saveInputAudioToFile = saveInputAudioToFile;
+      this.useOpenSLES = useOpenSLES;
+      this.disableBuiltInAEC = disableBuiltInAEC;
+      this.disableBuiltInAGC = disableBuiltInAGC;
+      this.disableBuiltInNS = disableBuiltInNS;
+      this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF;
+      this.enableRtcEventLog = enableRtcEventLog;
+      this.dataChannelParameters = dataChannelParameters;
+    }
+  }
+
+  /**
+   * Peer connection events.
+   *
+   * <p>Per the class javadoc, all callbacks are invoked from the client's
+   * looper/executor thread.
+   */
+  public interface PeerConnectionEvents {
+    /**
+     * Callback fired once local SDP is created and set.
+     */
+    void onLocalDescription(final SessionDescription sdp);
+
+    /**
+     * Callback fired once local Ice candidate is generated.
+     */
+    void onIceCandidate(final IceCandidate candidate);
+
+    /**
+     * Callback fired once local ICE candidates are removed.
+     */
+    void onIceCandidatesRemoved(final IceCandidate[] candidates);
+
+    /**
+     * Callback fired once connection is established (IceConnectionState is
+     * CONNECTED).
+     */
+    void onIceConnected();
+
+    /**
+     * Callback fired once connection is disconnected (IceConnectionState is
+     * DISCONNECTED).
+     */
+    void onIceDisconnected();
+
+    /**
+     * Callback fired once DTLS connection is established (PeerConnectionState
+     * is CONNECTED).
+     */
+    void onConnected();
+
+    /**
+     * Callback fired once DTLS connection is disconnected (PeerConnectionState
+     * is DISCONNECTED).
+     */
+    void onDisconnected();
+
+    /**
+     * Callback fired once peer connection is closed.
+     */
+    void onPeerConnectionClosed();
+
+    /**
+     * Callback fired once peer connection statistics is ready.
+     */
+    void onPeerConnectionStatsReady(final RTCStatsReport report);
+
+    /**
+     * Callback fired once peer connection error happened.
+     */
+    void onPeerConnectionError(final String description);
+  }
+
+  /**
+   * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
+   * ownership of `eglBase`.
+   *
+   * <p>Schedules one-time global PeerConnectionFactory initialization (field
+   * trials, internal tracer) on the shared executor thread.
+   */
+  public PeerConnectionClient(Context appContext, EglBase eglBase,
+      PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {
+    this.rootEglBase = eglBase;
+    this.appContext = appContext;
+    this.events = events;
+    this.peerConnectionParameters = peerConnectionParameters;
+    // The data channel is enabled only when its parameters were supplied.
+    this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;
+
+    Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters));
+
+    final String fieldTrials = getFieldTrials(peerConnectionParameters);
+    executor.execute(() -> {
+      Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials);
+      PeerConnectionFactory.initialize(
+          PeerConnectionFactory.InitializationOptions.builder(appContext)
+              .setFieldTrials(fieldTrials)
+              .setEnableInternalTracer(true)
+              .createInitializationOptions());
+    });
+  }
+
+  /**
+   * This function should only be called once.
+   *
+   * @throws IllegalStateException if the factory was already constructed.
+   */
+  public void createPeerConnectionFactory(PeerConnectionFactory.Options options) {
+    if (factory != null) {
+      throw new IllegalStateException("PeerConnectionFactory has already been constructed");
+    }
+    executor.execute(() -> createPeerConnectionFactoryInternal(options));
+  }
+
+  /** Convenience overload for a single remote sink; delegates to the list variant. */
+  public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink,
+      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+    if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
+      Log.w(TAG, "Video call enabled but no video capturer provided.");
+    }
+    createPeerConnection(
+        localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters);
+  }
+
+  /**
+   * Stores the rendering/capture collaborators, then creates constraints, the
+   * peer connection and (optionally) the RTC event log on the executor thread.
+   */
+  public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks,
+      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+    if (peerConnectionParameters == null) {
+      Log.e(TAG, "Creating peer connection without initializing factory.");
+      return;
+    }
+    this.localRender = localRender;
+    this.remoteSinks = remoteSinks;
+    this.videoCapturer = videoCapturer;
+    this.signalingParameters = signalingParameters;
+    executor.execute(() -> {
+      try {
+        createMediaConstraintsInternal();
+        createPeerConnectionInternal();
+        maybeCreateAndStartRtcEventLog();
+      } catch (Exception e) {
+        // Report to the events listener, then rethrow so the failure is visible.
+        reportError("Failed to create peer connection: " + e.getMessage());
+        throw e;
+      }
+    });
+  }
+
+  /** Asynchronously tears down the call; see closeInternal() for the details. */
+  public void close() {
+    executor.execute(this ::closeInternal);
+  }
+
+  // Video is only used when it is both requested and a capturer was provided.
+  private boolean isVideoCallEnabled() {
+    return peerConnectionParameters.videoCallEnabled && videoCapturer != null;
+  }
+
+  // Creates the PeerConnectionFactory on the executor thread: sets up optional
+  // tracing, optional recording of microphone input to file, the Java audio
+  // device module, and HW/SW video codec factories per `peerConnectionParameters`.
+  // `options` may be null.
+  private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) {
+    isError = false;
+
+    if (peerConnectionParameters.tracing) {
+      PeerConnectionFactory.startInternalTracingCapture(
+          Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+          + "webrtc-trace.txt");
+    }
+
+    // Check if ISAC is used by default.
+    preferIsac = peerConnectionParameters.audioCodec != null
+        && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
+
+    // It is possible to save a copy in raw PCM format on a file by checking
+    // the "Save input audio to file" checkbox in the Settings UI. A callback
+    // interface is set when this flag is enabled. As a result, a copy of recorded
+    // audio samples are provided to this client directly from the native audio
+    // layer in Java.
+    if (peerConnectionParameters.saveInputAudioToFile) {
+      if (!peerConnectionParameters.useOpenSLES) {
+        Log.d(TAG, "Enable recording of microphone input audio to file");
+        saveRecordedAudioToFile = new RecordedAudioToFileController(executor);
+      } else {
+        // TODO(henrika): ensure that the UI reflects that if OpenSL ES is selected,
+        // then the "Save input audio to file" option shall be grayed out.
+        Log.e(TAG, "Recording of input audio is not supported for OpenSL ES");
+      }
+    }
+
+    final AudioDeviceModule adm = createJavaAudioDevice();
+
+    // Create peer connection factory.
+    if (options != null) {
+      Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
+    }
+    final boolean enableH264HighProfile =
+        VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec);
+    final VideoEncoderFactory encoderFactory;
+    final VideoDecoderFactory decoderFactory;
+
+    if (peerConnectionParameters.videoCodecHwAcceleration) {
+      encoderFactory = new DefaultVideoEncoderFactory(
+          rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
+      decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
+    } else {
+      encoderFactory = new SoftwareVideoEncoderFactory();
+      decoderFactory = new SoftwareVideoDecoderFactory();
+    }
+
+    // Disable encryption for loopback calls. `options` is treated as optional
+    // everywhere else in this method, so create it on demand here instead of
+    // risking a NullPointerException for loopback calls started without
+    // factory options.
+    if (peerConnectionParameters.loopback) {
+      if (options == null) {
+        options = new PeerConnectionFactory.Options();
+      }
+      options.disableEncryption = true;
+    }
+    factory = PeerConnectionFactory.builder()
+                  .setOptions(options)
+                  .setAudioDeviceModule(adm)
+                  .setVideoEncoderFactory(encoderFactory)
+                  .setVideoDecoderFactory(decoderFactory)
+                  .createPeerConnectionFactory();
+    Log.d(TAG, "Peer connection factory created.");
+    adm.release();
+  }
+
+  /**
+   * Builds the JavaAudioDeviceModule with error/state callbacks wired to
+   * reportError()/logging, honoring the built-in AEC/NS disable flags and the
+   * optional samples-ready recorder.
+   */
+  AudioDeviceModule createJavaAudioDevice() {
+    // Enable/disable OpenSL ES playback.
+    if (!peerConnectionParameters.useOpenSLES) {
+      Log.w(TAG, "External OpenSLES ADM not implemented yet.");
+      // TODO(magjed): Add support for external OpenSLES ADM.
+    }
+
+    // Set audio record error callbacks.
+    AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() {
+      @Override
+      public void onWebRtcAudioRecordInitError(String errorMessage) {
+        Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
+        reportError(errorMessage);
+      }
+
+      @Override
+      public void onWebRtcAudioRecordStartError(
+          JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) {
+        Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
+        reportError(errorMessage);
+      }
+
+      @Override
+      public void onWebRtcAudioRecordError(String errorMessage) {
+        Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
+        reportError(errorMessage);
+      }
+    };
+
+    AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() {
+      @Override
+      public void onWebRtcAudioTrackInitError(String errorMessage) {
+        Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
+        reportError(errorMessage);
+      }
+
+      @Override
+      public void onWebRtcAudioTrackStartError(
+          JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) {
+        Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
+        reportError(errorMessage);
+      }
+
+      @Override
+      public void onWebRtcAudioTrackError(String errorMessage) {
+        Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
+        reportError(errorMessage);
+      }
+    };
+
+    // Set audio record state callbacks.
+    AudioRecordStateCallback audioRecordStateCallback = new AudioRecordStateCallback() {
+      @Override
+      public void onWebRtcAudioRecordStart() {
+        Log.i(TAG, "Audio recording starts");
+      }
+
+      @Override
+      public void onWebRtcAudioRecordStop() {
+        Log.i(TAG, "Audio recording stops");
+      }
+    };
+
+    // Set audio track state callbacks.
+    AudioTrackStateCallback audioTrackStateCallback = new AudioTrackStateCallback() {
+      @Override
+      public void onWebRtcAudioTrackStart() {
+        Log.i(TAG, "Audio playout starts");
+      }
+
+      @Override
+      public void onWebRtcAudioTrackStop() {
+        Log.i(TAG, "Audio playout stops");
+      }
+    };
+
+    return JavaAudioDeviceModule.builder(appContext)
+        .setSamplesReadyCallback(saveRecordedAudioToFile)
+        .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
+        .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
+        .setAudioRecordErrorCallback(audioRecordErrorCallback)
+        .setAudioTrackErrorCallback(audioTrackErrorCallback)
+        .setAudioRecordStateCallback(audioRecordStateCallback)
+        .setAudioTrackStateCallback(audioTrackStateCallback)
+        .createAudioDeviceModule();
+  }
+
+  /**
+   * Populates videoWidth/videoHeight/videoFps (falling back to HD@30 when
+   * unspecified) and builds the audio and SDP MediaConstraints.
+   */
+  private void createMediaConstraintsInternal() {
+    // Create video constraints if video call is enabled.
+    if (isVideoCallEnabled()) {
+      videoWidth = peerConnectionParameters.videoWidth;
+      videoHeight = peerConnectionParameters.videoHeight;
+      videoFps = peerConnectionParameters.videoFps;
+
+      // If video resolution is not specified, default to HD.
+      if (videoWidth == 0 || videoHeight == 0) {
+        videoWidth = HD_VIDEO_WIDTH;
+        videoHeight = HD_VIDEO_HEIGHT;
+      }
+
+      // If fps is not specified, default to 30.
+      if (videoFps == 0) {
+        videoFps = 30;
+      }
+      Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
+    }
+
+    // Create audio constraints.
+    audioConstraints = new MediaConstraints();
+    // added for audio performance measurements
+    if (peerConnectionParameters.noAudioProcessing) {
+      Log.d(TAG, "Disabling audio processing");
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
+    }
+    // Create SDP constraints.
+    sdpMediaConstraints = new MediaConstraints();
+    sdpMediaConstraints.mandatory.add(
+        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
+    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
+        "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled())));
+  }
+
+  /**
+   * Creates the PeerConnection (Unified Plan, ECDSA, bundled/muxed RTP), the
+   * optional data channel, local audio/video tracks and sinks, and optional
+   * AEC dump / audio recording. Requires the factory to exist.
+   */
+  private void createPeerConnectionInternal() {
+    if (factory == null || isError) {
+      Log.e(TAG, "Peerconnection factory is not created");
+      return;
+    }
+    Log.d(TAG, "Create peer connection.");
+
+    // Remote candidates are queued until both descriptions are set.
+    queuedRemoteCandidates = new ArrayList<>();
+
+    PeerConnection.RTCConfiguration rtcConfig =
+        new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
+    // TCP candidates are only useful when connecting to a server that supports
+    // ICE-TCP.
+    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
+    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
+    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
+    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+    // Use ECDSA encryption.
+    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
+    rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+    peerConnection = factory.createPeerConnection(rtcConfig, pcObserver);
+
+    if (dataChannelEnabled) {
+      DataChannel.Init init = new DataChannel.Init();
+      init.ordered = peerConnectionParameters.dataChannelParameters.ordered;
+      init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated;
+      init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits;
+      init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs;
+      init.id = peerConnectionParameters.dataChannelParameters.id;
+      init.protocol = peerConnectionParameters.dataChannelParameters.protocol;
+      dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init);
+    }
+    isInitiator = false;
+
+    // Set INFO libjingle logging.
+    // NOTE: this _must_ happen while `factory` is alive!
+    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
+
+    List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
+    if (isVideoCallEnabled()) {
+      peerConnection.addTrack(createVideoTrack(videoCapturer), mediaStreamLabels);
+      // We can add the renderers right away because we don't need to wait for an
+      // answer to get the remote track.
+      remoteVideoTrack = getRemoteVideoTrack();
+      remoteVideoTrack.setEnabled(renderVideo);
+      for (VideoSink remoteSink : remoteSinks) {
+        remoteVideoTrack.addSink(remoteSink);
+      }
+    }
+    peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
+    if (isVideoCallEnabled()) {
+      findVideoSender();
+    }
+
+    if (peerConnectionParameters.aecDump) {
+      try {
+        ParcelFileDescriptor aecDumpFileDescriptor =
+            ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+                                          + File.separator + "Download/audio.aecdump"),
+                ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+                    | ParcelFileDescriptor.MODE_TRUNCATE);
+        factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1);
+      } catch (IOException e) {
+        Log.e(TAG, "Can not open aecdump file", e);
+      }
+    }
+
+    if (saveRecordedAudioToFile != null) {
+      if (saveRecordedAudioToFile.start()) {
+        Log.d(TAG, "Recording input audio to file is activated");
+      }
+    }
+    Log.d(TAG, "Peer connection created.");
+  }
+
+  // Returns a timestamped file under the app-private rtc_event_log directory.
+  private File createRtcEventLogOutputFile() {
+    DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault());
+    Date date = new Date();
+    final String outputFileName = "event_log_" + dateFormat.format(date) + ".log";
+    return new File(
+        appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName);
+  }
+
+  // Starts the RtcEventLog when enabled and a peer connection exists.
+  private void maybeCreateAndStartRtcEventLog() {
+    if (appContext == null || peerConnection == null) {
+      return;
+    }
+    if (!peerConnectionParameters.enableRtcEventLog) {
+      Log.d(TAG, "RtcEventLog is disabled.");
+      return;
+    }
+    rtcEventLog = new RtcEventLog(peerConnection);
+    rtcEventLog.start(createRtcEventLogOutputFile());
+  }
+
+  /**
+   * Tears everything down in dependency order: stats timer, data channel,
+   * event log, peer connection, audio source, capturer, video source, texture
+   * helper, audio recorder, factory, EGL context and global tracing. Fires
+   * onPeerConnectionClosed() when done.
+   */
+  private void closeInternal() {
+    if (factory != null && peerConnectionParameters.aecDump) {
+      factory.stopAecDump();
+    }
+    Log.d(TAG, "Closing peer connection.");
+    statsTimer.cancel();
+    if (dataChannel != null) {
+      dataChannel.dispose();
+      dataChannel = null;
+    }
+    if (rtcEventLog != null) {
+      // RtcEventLog should stop before the peer connection is disposed.
+      rtcEventLog.stop();
+      rtcEventLog = null;
+    }
+    if (peerConnection != null) {
+      peerConnection.dispose();
+      peerConnection = null;
+    }
+    Log.d(TAG, "Closing audio source.");
+    if (audioSource != null) {
+      audioSource.dispose();
+      audioSource = null;
+    }
+    Log.d(TAG, "Stopping capture.");
+    if (videoCapturer != null) {
+      try {
+        videoCapturer.stopCapture();
+      } catch (InterruptedException e) {
+        throw new RuntimeException(e);
+      }
+      videoCapturerStopped = true;
+      videoCapturer.dispose();
+      videoCapturer = null;
+    }
+    Log.d(TAG, "Closing video source.");
+    if (videoSource != null) {
+      videoSource.dispose();
+      videoSource = null;
+    }
+    if (surfaceTextureHelper != null) {
+      surfaceTextureHelper.dispose();
+      surfaceTextureHelper = null;
+    }
+    if (saveRecordedAudioToFile != null) {
+      Log.d(TAG, "Closing audio file for recorded input audio.");
+      saveRecordedAudioToFile.stop();
+      saveRecordedAudioToFile = null;
+    }
+    localRender = null;
+    remoteSinks = null;
+    Log.d(TAG, "Closing peer connection factory.");
+    if (factory != null) {
+      factory.dispose();
+      factory = null;
+    }
+    rootEglBase.release();
+    Log.d(TAG, "Closing peer connection done.");
+    events.onPeerConnectionClosed();
+    PeerConnectionFactory.stopInternalTracingCapture();
+    PeerConnectionFactory.shutdownInternalTracer();
+  }
+
+  /** True when video is enabled and the configured resolution is at least 720p. */
+  public boolean isHDVideo() {
+    return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720;
+  }
+
+  // Requests a stats report and forwards it to events.onPeerConnectionStatsReady().
+  // No-op when the connection is gone or in the error state.
+  private void getStats() {
+    if (peerConnection == null || isError) {
+      return;
+    }
+    peerConnection.getStats(new RTCStatsCollectorCallback() {
+      @Override
+      public void onStatsDelivered(RTCStatsReport report) {
+        events.onPeerConnectionStatsReady(report);
+      }
+    });
+  }
+
+  /**
+   * Starts or stops periodic stats collection every `periodMs` milliseconds.
+   * Note: disabling cancels the underlying java.util.Timer, which cannot be
+   * re-scheduled afterwards (hence the try/catch on re-enable).
+   */
+  public void enableStatsEvents(boolean enable, int periodMs) {
+    if (enable) {
+      try {
+        statsTimer.schedule(new TimerTask() {
+          @Override
+          public void run() {
+            executor.execute(() -> getStats());
+          }
+        }, 0, periodMs);
+      } catch (Exception e) {
+        Log.e(TAG, "Can not schedule statistics timer", e);
+      }
+    } else {
+      statsTimer.cancel();
+    }
+  }
+
+  /** Enables/disables sending local audio; applied on the executor thread. */
+  public void setAudioEnabled(final boolean enable) {
+    executor.execute(() -> {
+      enableAudio = enable;
+      if (localAudioTrack != null) {
+        localAudioTrack.setEnabled(enableAudio);
+      }
+    });
+  }
+
+  /** Enables/disables both local and remote video tracks; applied on the executor thread. */
+  public void setVideoEnabled(final boolean enable) {
+    executor.execute(() -> {
+      renderVideo = enable;
+      if (localVideoTrack != null) {
+        localVideoTrack.setEnabled(renderVideo);
+      }
+      if (remoteVideoTrack != null) {
+        remoteVideoTrack.setEnabled(renderVideo);
+      }
+    });
+  }
+
+  /** Creates an SDP offer (marks this side as initiator); result arrives via sdpObserver. */
+  public void createOffer() {
+    executor.execute(() -> {
+      if (peerConnection != null && !isError) {
+        Log.d(TAG, "PC Create OFFER");
+        isInitiator = true;
+        peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
+      }
+    });
+  }
+
+  /** Creates an SDP answer (marks this side as non-initiator); result arrives via sdpObserver. */
+  public void createAnswer() {
+    executor.execute(() -> {
+      if (peerConnection != null && !isError) {
+        Log.d(TAG, "PC create ANSWER");
+        isInitiator = false;
+        peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
+      }
+    });
+  }
+
+  /**
+   * Adds a remote ICE candidate. While queuedRemoteCandidates is non-null
+   * (i.e. before both descriptions are set - see the field comment) the
+   * candidate is queued; afterwards it is applied directly.
+   */
+  public void addRemoteIceCandidate(final IceCandidate candidate) {
+    executor.execute(() -> {
+      if (peerConnection != null && !isError) {
+        if (queuedRemoteCandidates != null) {
+          queuedRemoteCandidates.add(candidate);
+        } else {
+          peerConnection.addIceCandidate(candidate, new AddIceObserver() {
+            @Override
+            public void onAddSuccess() {
+              Log.d(TAG, "Candidate " + candidate + " successfully added.");
+            }
+            @Override
+            public void onAddFailure(String error) {
+              Log.d(TAG, "Candidate " + candidate + " addition failed: " + error);
+            }
+          });
+        }
+      }
+    });
+  }
+
+  /** Removes previously added remote ICE candidates, draining the queue first. */
+  public void removeRemoteIceCandidates(final IceCandidate[] candidates) {
+    executor.execute(() -> {
+      if (peerConnection == null || isError) {
+        return;
+      }
+      // Drain the queued remote candidates if there is any so that
+      // they are processed in the proper order.
+      drainCandidates();
+      peerConnection.removeIceCandidates(candidates);
+    });
+  }
+
+  /**
+   * Applies the remote session description after rewriting its SDP: moves the
+   * preferred audio/video codecs to the front (preferCodec) and injects the
+   * configured audio start bitrate (setStartBitrate).
+   */
+  public void setRemoteDescription(final SessionDescription desc) {
+    executor.execute(() -> {
+      if (peerConnection == null || isError) {
+        return;
+      }
+      String sdp = desc.description;
+      if (preferIsac) {
+        sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true);
+      }
+      if (isVideoCallEnabled()) {
+        sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false);
+      }
+      if (peerConnectionParameters.audioStartBitrate > 0) {
+        sdp = setStartBitrate(
+            AUDIO_CODEC_OPUS, false, sdp, peerConnectionParameters.audioStartBitrate);
+      }
+      Log.d(TAG, "Set remote SDP.");
+      SessionDescription sdpRemote = new SessionDescription(desc.type, sdp);
+      peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
+    });
+  }
+
+  /**
+   * Stops the local video capturer if it is currently running. Safe to call
+   * repeatedly; a no-op when the capturer is absent or already stopped.
+   */
+  public void stopVideoSource() {
+    executor.execute(() -> {
+      if (videoCapturer != null && !videoCapturerStopped) {
+        Log.d(TAG, "Stop video source.");
+        try {
+          videoCapturer.stopCapture();
+        } catch (InterruptedException e) {
+          // Restore the interrupt status instead of silently swallowing it so
+          // code running later on the executor thread can still observe it.
+          Thread.currentThread().interrupt();
+        }
+        videoCapturerStopped = true;
+      }
+    });
+  }
+
+  /** Restarts capture at the previously configured width/height/fps if it was stopped. */
+  public void startVideoSource() {
+    executor.execute(() -> {
+      if (videoCapturer != null && videoCapturerStopped) {
+        Log.d(TAG, "Restart video source.");
+        videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
+        videoCapturerStopped = false;
+      }
+    });
+  }
+
+  /**
+   * Sets the maximum encoder bitrate on the local video sender; a null
+   * `maxBitrateKbps` removes the limit. No-op until the sender has been found
+   * (see findVideoSender()) or after an error.
+   */
+  public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps) {
+    executor.execute(() -> {
+      // localVideoSender is null-checked here, so no second check is needed
+      // below (the former duplicate check was unreachable).
+      if (peerConnection == null || localVideoSender == null || isError) {
+        return;
+      }
+      Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps);
+
+      RtpParameters parameters = localVideoSender.getParameters();
+      if (parameters.encodings.size() == 0) {
+        Log.w(TAG, "RtpParameters are not ready.");
+        return;
+      }
+
+      for (RtpParameters.Encoding encoding : parameters.encodings) {
+        // Null value means no limit.
+        encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
+      }
+      if (!localVideoSender.setParameters(parameters)) {
+        Log.e(TAG, "RtpSender.setParameters failed.");
+      }
+      Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps);
+    });
+  }
+
+  /** Logs `errorMessage` and forwards it to the events callback, at most once. */
+  private void reportError(final String errorMessage) {
+    Log.e(TAG, "Peerconnection error: " + errorMessage);
+    executor.execute(() -> {
+      if (isError) {
+        return; // Only the first error is reported to the client.
+      }
+      events.onPeerConnectionError(errorMessage);
+      isError = true;
+    });
+  }
+
+  // Creates the local audio source/track pair and stores them in instance fields.
+  // The track is enabled or disabled according to the `enableAudio` flag.
+  @Nullable
+  private AudioTrack createAudioTrack() {
+    audioSource = factory.createAudioSource(audioConstraints);
+    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
+    localAudioTrack.setEnabled(enableAudio);
+    return localAudioTrack;
+  }
+
+  // Creates the local video track from `capturer`: sets up a SurfaceTextureHelper
+  // capture thread, starts capture at the configured width/height/fps, and routes
+  // frames to the local renderer. Side effects: assigns surfaceTextureHelper,
+  // videoSource and localVideoTrack instance fields.
+  @Nullable
+  private VideoTrack createVideoTrack(VideoCapturer capturer) {
+    surfaceTextureHelper =
+        SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
+    videoSource = factory.createVideoSource(capturer.isScreencast());
+    capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
+    capturer.startCapture(videoWidth, videoHeight, videoFps);
+
+    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
+    localVideoTrack.setEnabled(renderVideo);
+    localVideoTrack.addSink(localRender);
+    return localVideoTrack;
+  }
+
+ private void findVideoSender() {
+ for (RtpSender sender : peerConnection.getSenders()) {
+ if (sender.track() != null) {
+ String trackType = sender.track().kind();
+ if (trackType.equals(VIDEO_TRACK_TYPE)) {
+ Log.d(TAG, "Found video sender.");
+ localVideoSender = sender;
+ }
+ }
+ }
+ }
+
+  // Returns the first remote VideoTrack found among the transceivers; there is
+  // assumed to be at most one. Returns null if no video track is present.
+  private @Nullable VideoTrack getRemoteVideoTrack() {
+    for (RtpTransceiver transceiver : peerConnection.getTransceivers()) {
+      final MediaStreamTrack receiverTrack = transceiver.getReceiver().track();
+      if (!(receiverTrack instanceof VideoTrack)) {
+        continue;
+      }
+      return (VideoTrack) receiverTrack;
+    }
+    return null;
+  }
+
+ private static String getSdpVideoCodecName(PeerConnectionParameters parameters) {
+ switch (parameters.videoCodec) {
+ case VIDEO_CODEC_VP8:
+ return VIDEO_CODEC_VP8;
+ case VIDEO_CODEC_VP9:
+ return VIDEO_CODEC_VP9;
+ case VIDEO_CODEC_AV1:
+ return VIDEO_CODEC_AV1;
+ case VIDEO_CODEC_H264_HIGH:
+ case VIDEO_CODEC_H264_BASELINE:
+ return VIDEO_CODEC_H264;
+ default:
+ return VIDEO_CODEC_VP8;
+ }
+ }
+
+ private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) {
+ String fieldTrials = "";
+ if (peerConnectionParameters.videoFlexfecEnabled) {
+ fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL;
+ Log.d(TAG, "Enable FlexFEC field trial.");
+ }
+ if (peerConnectionParameters.disableWebRtcAGCAndHPF) {
+ fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
+ Log.d(TAG, "Disable WebRTC AGC field trial.");
+ }
+ return fieldTrials;
+ }
+
+  // Inserts or updates an a=fmtp bitrate attribute for `codec` in `sdp`.
+  // For video codecs the start bitrate is written in kbps; for audio codecs
+  // the bitrate parameter is written in bps (bitrateKbps * 1000).
+  // Returns `sdp` unchanged if no rtpmap line for the codec is found.
+  @SuppressWarnings("StringSplitter")
+  private static String setStartBitrate(
+      String codec, boolean isVideoCodec, String sdp, int bitrateKbps) {
+    String[] lines = sdp.split("\r\n");
+    int rtpmapLineIndex = -1;
+    boolean sdpFormatUpdated = false;
+    String codecRtpMap = null;
+    // Search for codec rtpmap in format
+    // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
+    String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
+    Pattern codecPattern = Pattern.compile(regex);
+    for (int i = 0; i < lines.length; i++) {
+      Matcher codecMatcher = codecPattern.matcher(lines[i]);
+      if (codecMatcher.matches()) {
+        // Group 1 is the payload type number assigned to the codec.
+        codecRtpMap = codecMatcher.group(1);
+        rtpmapLineIndex = i;
+        break;
+      }
+    }
+    if (codecRtpMap == null) {
+      Log.w(TAG, "No rtpmap for " + codec + " codec");
+      return sdp;
+    }
+    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
+
+    // Check if a=fmtp string already exist in remote SDP for this codec and
+    // update it with new bitrate parameter.
+    regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$";
+    codecPattern = Pattern.compile(regex);
+    for (int i = 0; i < lines.length; i++) {
+      Matcher codecMatcher = codecPattern.matcher(lines[i]);
+      if (codecMatcher.matches()) {
+        Log.d(TAG, "Found " + codec + " " + lines[i]);
+        if (isVideoCodec) {
+          lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+        } else {
+          // Audio bitrate parameter is expressed in bps, not kbps.
+          lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
+        }
+        Log.d(TAG, "Update remote SDP line: " + lines[i]);
+        sdpFormatUpdated = true;
+        break;
+      }
+    }
+
+    // Reassemble the SDP, inserting a fresh a=fmtp line directly after the
+    // rtpmap line when no existing fmtp line was updated above.
+    StringBuilder newSdpDescription = new StringBuilder();
+    for (int i = 0; i < lines.length; i++) {
+      newSdpDescription.append(lines[i]).append("\r\n");
+      // Append new a=fmtp line if no such line exist for a codec.
+      if (!sdpFormatUpdated && i == rtpmapLineIndex) {
+        String bitrateSet;
+        if (isVideoCodec) {
+          bitrateSet =
+              "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+        } else {
+          bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+              + (bitrateKbps * 1000);
+        }
+        Log.d(TAG, "Add remote SDP line: " + bitrateSet);
+        newSdpDescription.append(bitrateSet).append("\r\n");
+      }
+    }
+    return newSdpDescription.toString();
+  }
+
+  /** Returns the index of the line starting with "m=audio "/"m=video ", or -1 if absent. */
+  private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) {
+    final String prefix = isAudio ? "m=audio " : "m=video ";
+    int index = 0;
+    for (String line : sdpLines) {
+      if (line.startsWith(prefix)) {
+        return index;
+      }
+      ++index;
+    }
+    return -1;
+  }
+
+ private static String joinString(
+ Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd) {
+ Iterator<? extends CharSequence> iter = s.iterator();
+ if (!iter.hasNext()) {
+ return "";
+ }
+ StringBuilder buffer = new StringBuilder(iter.next());
+ while (iter.hasNext()) {
+ buffer.append(delimiter).append(iter.next());
+ }
+ if (delimiterAtEnd) {
+ buffer.append(delimiter);
+ }
+ return buffer.toString();
+ }
+
+  // Rebuilds an SDP media description line so that `preferredPayloadTypes` come
+  // first in the payload-type list. Returns null if the line is malformed.
+  private static @Nullable String movePayloadTypesToFront(
+      List<String> preferredPayloadTypes, String mLine) {
+    // The format of the media description line should be: m=<media> <port> <proto> <fmt> ...
+    final List<String> parts = Arrays.asList(mLine.split(" "));
+    if (parts.size() <= 3) {
+      Log.e(TAG, "Wrong SDP media description format: " + mLine);
+      return null;
+    }
+    // Header first, then the preferred payload types, then everything else.
+    final List<String> reordered = new ArrayList<>(parts.subList(0, 3));
+    reordered.addAll(preferredPayloadTypes);
+    final List<String> remaining = new ArrayList<>(parts.subList(3, parts.size()));
+    remaining.removeAll(preferredPayloadTypes);
+    reordered.addAll(remaining);
+    return joinString(reordered, " ", false /* delimiterAtEnd */);
+  }
+
+  // Rewrites `sdp` so payload types named `codec` are listed first on the
+  // audio or video m= line, making the codec preferred during negotiation.
+  // Returns `sdp` unchanged if the m= line or the codec cannot be found.
+  private static String preferCodec(String sdp, String codec, boolean isAudio) {
+    final String[] lines = sdp.split("\r\n");
+    final int mLineIndex = findMediaDescriptionLine(isAudio, lines);
+    if (mLineIndex == -1) {
+      Log.w(TAG, "No mediaDescription line, so can't prefer " + codec);
+      return sdp;
+    }
+    // A list with all the payload types with name `codec`. The payload types are integers in the
+    // range 96-127, but they are stored as strings here.
+    final List<String> codecPayloadTypes = new ArrayList<>();
+    // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
+    final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$");
+    for (String line : lines) {
+      Matcher codecMatcher = codecPattern.matcher(line);
+      if (codecMatcher.matches()) {
+        codecPayloadTypes.add(codecMatcher.group(1));
+      }
+    }
+    if (codecPayloadTypes.isEmpty()) {
+      Log.w(TAG, "No payload types with name " + codec);
+      return sdp;
+    }
+
+    final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]);
+    if (newMLine == null) {
+      // Malformed m= line; leave the SDP untouched.
+      return sdp;
+    }
+    Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine);
+    lines[mLineIndex] = newMLine;
+    return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */);
+  }
+
+  // Adds all ICE candidates queued while waiting for the remote description,
+  // then clears the queue. A null queue means candidates are no longer being
+  // buffered and are added to the peer connection directly.
+  private void drainCandidates() {
+    if (queuedRemoteCandidates != null) {
+      Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
+      for (IceCandidate candidate : queuedRemoteCandidates) {
+        peerConnection.addIceCandidate(candidate, new AddIceObserver() {
+          @Override
+          public void onAddSuccess() {
+            Log.d(TAG, "Candidate " + candidate + " successfully added.");
+          }
+          @Override
+          public void onAddFailure(String error) {
+            // Failure is logged only; signaling continues with the remaining candidates.
+            Log.d(TAG, "Candidate " + candidate + " addition failed: " + error);
+          }
+        });
+      }
+      queuedRemoteCandidates = null;
+    }
+  }
+
+  /** Switches between front and back camera when the capturer supports it. */
+  private void switchCameraInternal() {
+    if (videoCapturer instanceof CameraVideoCapturer) {
+      if (!isVideoCallEnabled() || isError) {
+        Log.e(TAG,
+            "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError);
+        return; // No video is sent or only one camera is available or error happened.
+      }
+      Log.d(TAG, "Switch camera");
+      CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
+      cameraVideoCapturer.switchCamera(null);
+    } else {
+      // Fixed typo in the log message ("caputurer" -> "capturer").
+      Log.d(TAG, "Will not switch camera, video capturer is not a camera");
+    }
+  }
+
+  /** Asynchronously switches the camera on the executor thread. */
+  public void switchCamera() {
+    executor.execute(() -> switchCameraInternal());
+  }
+
+  // Asynchronously requests a new capture resolution and frame rate.
+  public void changeCaptureFormat(final int width, final int height, final int framerate) {
+    executor.execute(() -> changeCaptureFormatInternal(width, height, framerate));
+  }
+
+  // Applies a new output format by asking the video source to adapt; does not
+  // restart the capturer itself. No-op (with an error log) when video is
+  // disabled, an error occurred, or there is no capturer.
+  private void changeCaptureFormatInternal(int width, int height, int framerate) {
+    if (!isVideoCallEnabled() || isError || videoCapturer == null) {
+      Log.e(TAG,
+          "Failed to change capture format. Video: " + isVideoCallEnabled()
+              + ". Error : " + isError);
+      return;
+    }
+    Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+    videoSource.adaptOutputFormat(width, height, framerate);
+  }
+
+ // Implementation detail: observe ICE & stream changes and react accordingly.
+  // Implementation detail: observe ICE & stream changes and react accordingly.
+  // Callbacks arrive on WebRTC's signaling thread; work that touches client
+  // state is re-dispatched onto `executor`.
+  private class PCObserver implements PeerConnection.Observer {
+    @Override
+    public void onIceCandidate(final IceCandidate candidate) {
+      // Forward locally gathered candidates to the signaling layer.
+      executor.execute(() -> events.onIceCandidate(candidate));
+    }
+
+    @Override
+    public void onIceCandidateError(final IceCandidateErrorEvent event) {
+      Log.d(TAG,
+          "IceCandidateError address: " + event.address + ", port: " + event.port + ", url: "
+              + event.url + ", errorCode: " + event.errorCode + ", errorText: " + event.errorText);
+    }
+
+    @Override
+    public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+      executor.execute(() -> events.onIceCandidatesRemoved(candidates));
+    }
+
+    @Override
+    public void onSignalingChange(PeerConnection.SignalingState newState) {
+      Log.d(TAG, "SignalingState: " + newState);
+    }
+
+    @Override
+    public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
+      // Map ICE transitions onto the client-facing events interface.
+      executor.execute(() -> {
+        Log.d(TAG, "IceConnectionState: " + newState);
+        if (newState == IceConnectionState.CONNECTED) {
+          events.onIceConnected();
+        } else if (newState == IceConnectionState.DISCONNECTED) {
+          events.onIceDisconnected();
+        } else if (newState == IceConnectionState.FAILED) {
+          reportError("ICE connection failed.");
+        }
+      });
+    }
+
+    @Override
+    public void onConnectionChange(final PeerConnection.PeerConnectionState newState) {
+      // Aggregate (ICE + DTLS) connection state, mirrored to the client.
+      executor.execute(() -> {
+        Log.d(TAG, "PeerConnectionState: " + newState);
+        if (newState == PeerConnectionState.CONNECTED) {
+          events.onConnected();
+        } else if (newState == PeerConnectionState.DISCONNECTED) {
+          events.onDisconnected();
+        } else if (newState == PeerConnectionState.FAILED) {
+          reportError("DTLS connection failed.");
+        }
+      });
+    }
+
+    @Override
+    public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
+      Log.d(TAG, "IceGatheringState: " + newState);
+    }
+
+    @Override
+    public void onIceConnectionReceivingChange(boolean receiving) {
+      Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
+    }
+
+    @Override
+    public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {
+      Log.d(TAG, "Selected candidate pair changed because: " + event);
+    }
+
+    // Stream-level callbacks are unused; this app works with tracks/transceivers.
+    @Override
+    public void onAddStream(final MediaStream stream) {}
+
+    @Override
+    public void onRemoveStream(final MediaStream stream) {}
+
+    @Override
+    public void onDataChannel(final DataChannel dc) {
+      Log.d(TAG, "New Data channel " + dc.label());
+
+      if (!dataChannelEnabled)
+        return;
+
+      // Log-only observer: received text messages are printed, binary ignored.
+      dc.registerObserver(new DataChannel.Observer() {
+        @Override
+        public void onBufferedAmountChange(long previousAmount) {
+          // NOTE(review): this logs the channel state, not the buffered amount —
+          // presumably intentional for debugging, but worth confirming.
+          Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state());
+        }
+
+        @Override
+        public void onStateChange() {
+          Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
+        }
+
+        @Override
+        public void onMessage(final DataChannel.Buffer buffer) {
+          if (buffer.binary) {
+            Log.d(TAG, "Received binary msg over " + dc);
+            return;
+          }
+          // Copy the whole buffer and decode it as UTF-8 text.
+          ByteBuffer data = buffer.data;
+          final byte[] bytes = new byte[data.capacity()];
+          data.get(bytes);
+          String strData = new String(bytes, Charset.forName("UTF-8"));
+          Log.d(TAG, "Got msg: " + strData + " over " + dc);
+        }
+      });
+    }
+
+    @Override
+    public void onRenegotiationNeeded() {
+      // No need to do anything; AppRTC follows a pre-agreed-upon
+      // signaling/negotiation protocol.
+    }
+
+    @Override
+    public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {}
+
+    @Override
+    public void onRemoveTrack(final RtpReceiver receiver) {}
+  }
+
+ // Implementation detail: handle offer creation/signaling and answer setting,
+ // as well as adding remote ICE candidates once the answer SDP is set.
+  // Implementation detail: handle offer creation/signaling and answer setting,
+  // as well as adding remote ICE candidates once the answer SDP is set.
+  // Fixes spelling of "successfully" in the log messages; logic is unchanged.
+  private class SDPObserver implements SdpObserver {
+    @Override
+    public void onCreateSuccess(final SessionDescription desc) {
+      // Only one local description may be created per connection.
+      if (localDescription != null) {
+        reportError("Multiple SDP create.");
+        return;
+      }
+      // Apply codec preferences before storing/applying the local description.
+      String sdp = desc.description;
+      if (preferIsac) {
+        sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true);
+      }
+      if (isVideoCallEnabled()) {
+        sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false);
+      }
+      final SessionDescription newDesc = new SessionDescription(desc.type, sdp);
+      localDescription = newDesc;
+      executor.execute(() -> {
+        if (peerConnection != null && !isError) {
+          Log.d(TAG, "Set local SDP from " + desc.type);
+          peerConnection.setLocalDescription(sdpObserver, newDesc);
+        }
+      });
+    }
+
+    @Override
+    public void onSetSuccess() {
+      executor.execute(() -> {
+        if (peerConnection == null || isError) {
+          return;
+        }
+        if (isInitiator) {
+          // For offering peer connection we first create offer and set
+          // local SDP, then after receiving answer set remote SDP.
+          if (peerConnection.getRemoteDescription() == null) {
+            // We've just set our local SDP so time to send it.
+            Log.d(TAG, "Local SDP set successfully");
+            events.onLocalDescription(localDescription);
+          } else {
+            // We've just set remote description, so drain remote
+            // and send local ICE candidates.
+            Log.d(TAG, "Remote SDP set successfully");
+            drainCandidates();
+          }
+        } else {
+          // For answering peer connection we set remote SDP and then
+          // create answer and set local SDP.
+          if (peerConnection.getLocalDescription() != null) {
+            // We've just set our local SDP so time to send it, drain
+            // remote and send local ICE candidates.
+            Log.d(TAG, "Local SDP set successfully");
+            events.onLocalDescription(localDescription);
+            drainCandidates();
+          } else {
+            // We've just set remote SDP - do nothing for now -
+            // answer will be created soon.
+            Log.d(TAG, "Remote SDP set successfully");
+          }
+        }
+      });
+    }
+
+    @Override
+    public void onCreateFailure(final String error) {
+      reportError("createSDP error: " + error);
+    }
+
+    @Override
+    public void onSetFailure(final String error) {
+      reportError("setSDP error: " + error);
+    }
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java
new file mode 100644
index 0000000000..9787852feb
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.media.AudioFormat;
+import android.os.Environment;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.concurrent.ExecutorService;
+import org.webrtc.audio.JavaAudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
+
+/**
+ * Implements the AudioRecordSamplesReadyCallback interface and writes
+ * recorded raw audio samples to an output file.
+ */
+public class RecordedAudioToFileController implements SamplesReadyCallback {
+  private static final String TAG = "RecordedAudioToFile";
+  // ~10 minutes of mono 16-bit audio at 48 kHz (see the comment in the write path).
+  private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L;
+
+  // Guards isRunning and rawAudioFileOutputStream against concurrent access
+  // from the audio-record callback thread and the executor thread.
+  private final Object lock = new Object();
+  private final ExecutorService executor;
+  @Nullable private OutputStream rawAudioFileOutputStream;
+  private boolean isRunning;
+  private long fileSizeInBytes;
+
+  public RecordedAudioToFileController(ExecutorService executor) {
+    Log.d(TAG, "ctor");
+    this.executor = executor;
+  }
+
+  /**
+   * Should be called on the same executor thread as the one provided at
+   * construction.
+   */
+  public boolean start() {
+    Log.d(TAG, "start");
+    if (!isExternalStorageWritable()) {
+      Log.e(TAG, "Writing to external media is not possible");
+      return false;
+    }
+    synchronized (lock) {
+      isRunning = true;
+    }
+    return true;
+  }
+
+  /**
+   * Should be called on the same executor thread as the one provided at
+   * construction.
+   */
+  public void stop() {
+    Log.d(TAG, "stop");
+    synchronized (lock) {
+      isRunning = false;
+      if (rawAudioFileOutputStream != null) {
+        try {
+          rawAudioFileOutputStream.close();
+        } catch (IOException e) {
+          Log.e(TAG, "Failed to close file with saved input audio: " + e);
+        }
+        rawAudioFileOutputStream = null;
+      }
+      fileSizeInBytes = 0;
+    }
+  }
+
+  // Checks if external storage is available for read and write.
+  private boolean isExternalStorageWritable() {
+    String state = Environment.getExternalStorageState();
+    if (Environment.MEDIA_MOUNTED.equals(state)) {
+      return true;
+    }
+    return false;
+  }
+
+  // Utilizes audio parameters to create a file name which contains sufficient
+  // information so that the file can be played using an external file player.
+  // Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm.
+  private void openRawAudioOutputFile(int sampleRate, int channelCount) {
+    final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator
+        + "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz"
+        + ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm";
+    final File outputFile = new File(fileName);
+    try {
+      rawAudioFileOutputStream = new FileOutputStream(outputFile);
+    } catch (FileNotFoundException e) {
+      // On failure the stream stays null; the write path below tolerates that.
+      Log.e(TAG, "Failed to open audio output file: " + e.getMessage());
+    }
+    Log.d(TAG, "Opened file for recording: " + fileName);
+  }
+
+  // Called when new audio samples are ready.
+  @Override
+  public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
+    // The native audio layer on Android should use 16-bit PCM format.
+    if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
+      Log.e(TAG, "Invalid audio format");
+      return;
+    }
+    synchronized (lock) {
+      // Abort early if stop() has been called.
+      if (!isRunning) {
+        return;
+      }
+      // Open a new file for the first callback only since it allows us to add audio parameters to
+      // the file name.
+      if (rawAudioFileOutputStream == null) {
+        openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount());
+        fileSizeInBytes = 0;
+      }
+    }
+    // Append the recorded 16-bit audio samples to the open output file.
+    // NOTE(review): rawAudioFileOutputStream and fileSizeInBytes are read and
+    // written here without holding `lock`, so a concurrent stop() could close
+    // the stream mid-write — presumably tolerated via the IOException path;
+    // worth confirming.
+    executor.execute(() -> {
+      if (rawAudioFileOutputStream != null) {
+        try {
+          // Set a limit on max file size. 58348800 bytes corresponds to
+          // approximately 10 minutes of recording in mono at 48kHz.
+          if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) {
+            // Writes samples.getData().length bytes to output stream.
+            rawAudioFileOutputStream.write(samples.getData());
+            fileSizeInBytes += samples.getData().length;
+          }
+        } catch (IOException e) {
+          Log.e(TAG, "Failed to write audio to file: " + e.getMessage());
+        }
+      }
+    });
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
new file mode 100644
index 0000000000..6a0f235528
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.util.Log;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Scanner;
+import java.util.List;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.util.AsyncHttpURLConnection;
+import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.SessionDescription;
+
+/**
+ * AsyncTask that converts an AppRTC room URL into the set of signaling
+ * parameters to use with that room.
+ */
+public class RoomParametersFetcher {
+  private static final String TAG = "RoomRTCClient";
+  // Connect/read timeout for the optional TURN-server request.
+  private static final int TURN_HTTP_TIMEOUT_MS = 5000;
+  private final RoomParametersFetcherEvents events;
+  private final String roomUrl;
+  private final String roomMessage;
+
+  /**
+   * Room parameters fetcher callbacks.
+   */
+  public interface RoomParametersFetcherEvents {
+    /**
+     * Callback fired once the room's signaling parameters
+     * SignalingParameters are extracted.
+     */
+    void onSignalingParametersReady(final SignalingParameters params);
+
+    /**
+     * Callback for room parameters extraction error.
+     */
+    void onSignalingParametersError(final String description);
+  }
+
+  public RoomParametersFetcher(
+      String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
+    this.roomUrl = roomUrl;
+    this.roomMessage = roomMessage;
+    this.events = events;
+  }
+
+  // Kicks off the asynchronous POST to the room URL; results arrive through
+  // the RoomParametersFetcherEvents callbacks.
+  public void makeRequest() {
+    Log.d(TAG, "Connecting to room: " + roomUrl);
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
+          @Override
+          public void onHttpError(String errorMessage) {
+            Log.e(TAG, "Room connection error: " + errorMessage);
+            events.onSignalingParametersError(errorMessage);
+          }
+
+          @Override
+          public void onHttpComplete(String response) {
+            roomHttpResponseParse(response);
+          }
+        });
+    httpConnection.send();
+  }
+
+  // Parses the room server's JSON response into SignalingParameters, fetching
+  // TURN servers over HTTP if none are present in the room's ICE config.
+  // All failures are routed to onSignalingParametersError.
+  private void roomHttpResponseParse(String response) {
+    Log.d(TAG, "Room response: " + response);
+    try {
+      List<IceCandidate> iceCandidates = null;
+      SessionDescription offerSdp = null;
+      JSONObject roomJson = new JSONObject(response);
+
+      String result = roomJson.getString("result");
+      if (!result.equals("SUCCESS")) {
+        events.onSignalingParametersError("Room response error: " + result);
+        return;
+      }
+      // The actual payload is a JSON document nested in the "params" string.
+      response = roomJson.getString("params");
+      roomJson = new JSONObject(response);
+      String roomId = roomJson.getString("room_id");
+      String clientId = roomJson.getString("client_id");
+      String wssUrl = roomJson.getString("wss_url");
+      String wssPostUrl = roomJson.getString("wss_post_url");
+      boolean initiator = (roomJson.getBoolean("is_initiator"));
+      if (!initiator) {
+        // Non-initiators may already have buffered messages from the remote
+        // peer: an SDP offer and/or ICE candidates.
+        iceCandidates = new ArrayList<>();
+        String messagesString = roomJson.getString("messages");
+        JSONArray messages = new JSONArray(messagesString);
+        for (int i = 0; i < messages.length(); ++i) {
+          String messageString = messages.getString(i);
+          JSONObject message = new JSONObject(messageString);
+          String messageType = message.getString("type");
+          Log.d(TAG, "GAE->C #" + i + " : " + messageString);
+          if (messageType.equals("offer")) {
+            offerSdp = new SessionDescription(
+                SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
+          } else if (messageType.equals("candidate")) {
+            IceCandidate candidate = new IceCandidate(
+                message.getString("id"), message.getInt("label"), message.getString("candidate"));
+            iceCandidates.add(candidate);
+          } else {
+            Log.e(TAG, "Unknown message: " + messageString);
+          }
+        }
+      }
+      Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
+      Log.d(TAG, "Initiator: " + initiator);
+      Log.d(TAG, "WSS url: " + wssUrl);
+      Log.d(TAG, "WSS POST url: " + wssPostUrl);
+
+      List<PeerConnection.IceServer> iceServers =
+          iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
+      boolean isTurnPresent = false;
+      for (PeerConnection.IceServer server : iceServers) {
+        Log.d(TAG, "IceServer: " + server);
+        for (String uri : server.urls) {
+          if (uri.startsWith("turn:")) {
+            isTurnPresent = true;
+            break;
+          }
+        }
+      }
+      // Request TURN servers.
+      if (!isTurnPresent && !roomJson.optString("ice_server_url").isEmpty()) {
+        List<PeerConnection.IceServer> turnServers =
+            requestTurnServers(roomJson.getString("ice_server_url"));
+        for (PeerConnection.IceServer turnServer : turnServers) {
+          Log.d(TAG, "TurnServer: " + turnServer);
+          iceServers.add(turnServer);
+        }
+      }
+
+      SignalingParameters params = new SignalingParameters(
+          iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
+      events.onSignalingParametersReady(params);
+    } catch (JSONException e) {
+      events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
+    } catch (IOException e) {
+      events.onSignalingParametersError("Room IO error: " + e.toString());
+    }
+  }
+
+  // Requests & returns a TURN ICE Server based on a request URL. Must be run
+  // off the main thread!
+  @SuppressWarnings("UseNetworkAnnotations")
+  private List<PeerConnection.IceServer> requestTurnServers(String url)
+      throws IOException, JSONException {
+    List<PeerConnection.IceServer> turnServers = new ArrayList<>();
+    Log.d(TAG, "Request TURN from: " + url);
+    HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
+    connection.setDoOutput(true);
+    connection.setRequestProperty("REFERER", "https://appr.tc");
+    connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
+    connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
+    int responseCode = connection.getResponseCode();
+    if (responseCode != 200) {
+      throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+          + connection.getHeaderField(null));
+    }
+    InputStream responseStream = connection.getInputStream();
+    String response = drainStream(responseStream);
+    connection.disconnect();
+    Log.d(TAG, "TURN response: " + response);
+    // Expected shape: { "iceServers": [ { "urls": [...], "username", "credential" } ] }
+    JSONObject responseJSON = new JSONObject(response);
+    JSONArray iceServers = responseJSON.getJSONArray("iceServers");
+    for (int i = 0; i < iceServers.length(); ++i) {
+      JSONObject server = iceServers.getJSONObject(i);
+      JSONArray turnUrls = server.getJSONArray("urls");
+      String username = server.has("username") ? server.getString("username") : "";
+      String credential = server.has("credential") ? server.getString("credential") : "";
+      for (int j = 0; j < turnUrls.length(); j++) {
+        String turnUrl = turnUrls.getString(j);
+        PeerConnection.IceServer turnServer =
+            PeerConnection.IceServer.builder(turnUrl)
+                .setUsername(username)
+                .setPassword(credential)
+                .createIceServer();
+        turnServers.add(turnServer);
+      }
+    }
+    return turnServers;
+  }
+
+  // Return the list of ICE servers described by a WebRTCPeerConnection
+  // configuration string.
+  // NOTE(review): "urls" is read with getString(), so each entry is treated as
+  // a single URL string rather than an array — presumably matching the room
+  // server's pc_config format; verify against the server.
+  private List<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
+      throws JSONException {
+    JSONObject json = new JSONObject(pcConfig);
+    JSONArray servers = json.getJSONArray("iceServers");
+    List<PeerConnection.IceServer> ret = new ArrayList<>();
+    for (int i = 0; i < servers.length(); ++i) {
+      JSONObject server = servers.getJSONObject(i);
+      String url = server.getString("urls");
+      String credential = server.has("credential") ? server.getString("credential") : "";
+      PeerConnection.IceServer turnServer =
+          PeerConnection.IceServer.builder(url)
+              .setPassword(credential)
+              .createIceServer();
+      ret.add(turnServer);
+    }
+    return ret;
+  }
+
+  // Return the contents of an InputStream as a String.
+  private static String drainStream(InputStream in) {
+    // "\\A" makes the Scanner consume the entire stream as one token.
+    Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
+    return s.hasNext() ? s.next() : "";
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java
new file mode 100644
index 0000000000..103ad10f0b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+import java.io.File;
+import java.io.IOException;
+import org.webrtc.PeerConnection;
+
+// Thin wrapper around PeerConnection's RTC event logging: opens an output file,
+// hands its descriptor to WebRTC, and tracks start/stop state.
+public class RtcEventLog {
+  private static final String TAG = "RtcEventLog";
+  // Upper bound passed to WebRTC for the log file size (10 MB).
+  private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000;
+  private final PeerConnection peerConnection;
+  private RtcEventLogState state = RtcEventLogState.INACTIVE;
+
+  // Simple lifecycle: INACTIVE -> STARTED -> STOPPED.
+  enum RtcEventLogState {
+    INACTIVE,
+    STARTED,
+    STOPPED,
+  }
+
+  public RtcEventLog(PeerConnection peerConnection) {
+    if (peerConnection == null) {
+      throw new NullPointerException("The peer connection is null.");
+    }
+    this.peerConnection = peerConnection;
+  }
+
+  // Starts logging into `outputFile` (created/truncated). No-op with an error
+  // log if logging has already been started or the file cannot be opened.
+  public void start(final File outputFile) {
+    if (state == RtcEventLogState.STARTED) {
+      Log.e(TAG, "RtcEventLog has already started.");
+      return;
+    }
+    final ParcelFileDescriptor fileDescriptor;
+    try {
+      fileDescriptor = ParcelFileDescriptor.open(outputFile,
+          ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+              | ParcelFileDescriptor.MODE_TRUNCATE);
+    } catch (IOException e) {
+      Log.e(TAG, "Failed to create a new file", e);
+      return;
+    }
+
+    // Passes ownership of the file to WebRTC.
+    boolean success =
+        peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES);
+    if (!success) {
+      Log.e(TAG, "Failed to start RTC event log.");
+      return;
+    }
+    state = RtcEventLogState.STARTED;
+    Log.d(TAG, "RtcEventLog started.");
+  }
+
+  // Stops logging; no-op with an error log if logging was never started.
+  public void stop() {
+    if (state != RtcEventLogState.STARTED) {
+      Log.e(TAG, "RtcEventLog was not started.");
+      return;
+    }
+    peerConnection.stopRtcEventLog();
+    state = RtcEventLogState.STOPPED;
+    Log.d(TAG, "RtcEventLog stopped.");
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
new file mode 100644
index 0000000000..e9c6f6b798
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
@@ -0,0 +1,317 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Activity;
+import android.content.SharedPreferences;
+import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
+import android.os.Bundle;
+import android.preference.ListPreference;
+import android.preference.Preference;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.audio.JavaAudioDeviceModule;
+
+/**
+ * Settings activity for AppRTC.
+ */
+public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
+ private SettingsFragment settingsFragment;
+ // Preference keys, resolved once from string resources in onCreate().
+ // Video settings.
+ private String keyprefVideoCall;
+ private String keyprefScreencapture;
+ private String keyprefCamera2;
+ private String keyprefResolution;
+ private String keyprefFps;
+ private String keyprefCaptureQualitySlider;
+ private String keyprefMaxVideoBitrateType;
+ private String keyprefMaxVideoBitrateValue;
+ private String keyPrefVideoCodec;
+ private String keyprefHwCodec;
+ private String keyprefCaptureToTexture;
+ private String keyprefFlexfec;
+
+ // Audio settings.
+ private String keyprefStartAudioBitrateType;
+ private String keyprefStartAudioBitrateValue;
+ private String keyPrefAudioCodec;
+ private String keyprefNoAudioProcessing;
+ private String keyprefAecDump;
+ private String keyprefEnableSaveInputAudioToFile;
+ private String keyprefOpenSLES;
+ private String keyprefDisableBuiltInAEC;
+ private String keyprefDisableBuiltInAGC;
+ private String keyprefDisableBuiltInNS;
+ private String keyprefDisableWebRtcAGCAndHPF;
+ private String keyprefSpeakerphone;
+
+ // Miscellaneous settings.
+ private String keyPrefRoomServerUrl;
+ private String keyPrefDisplayHud;
+ private String keyPrefTracing;
+ private String keyprefEnabledRtcEventLog;
+
+ // Data channel settings.
+ private String keyprefEnableDataChannel;
+ private String keyprefOrdered;
+ private String keyprefMaxRetransmitTimeMs;
+ private String keyprefMaxRetransmits;
+ private String keyprefDataProtocol;
+ private String keyprefNegotiated;
+ private String keyprefDataId;
+
+ /** Resolves all preference keys from resources and installs the SettingsFragment as content. */
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ keyprefVideoCall = getString(R.string.pref_videocall_key);
+ keyprefScreencapture = getString(R.string.pref_screencapture_key);
+ keyprefCamera2 = getString(R.string.pref_camera2_key);
+ keyprefResolution = getString(R.string.pref_resolution_key);
+ keyprefFps = getString(R.string.pref_fps_key);
+ keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
+ keyprefMaxVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
+ keyprefMaxVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
+ keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
+ keyprefHwCodec = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
+ keyprefFlexfec = getString(R.string.pref_flexfec_key);
+
+ keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
+ keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
+ keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
+ keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefEnableSaveInputAudioToFile =
+ getString(R.string.pref_enable_save_input_audio_to_file_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
+ keyprefDisableBuiltInAEC = getString(R.string.pref_disable_built_in_aec_key);
+ keyprefDisableBuiltInAGC = getString(R.string.pref_disable_built_in_agc_key);
+ keyprefDisableBuiltInNS = getString(R.string.pref_disable_built_in_ns_key);
+ keyprefDisableWebRtcAGCAndHPF = getString(R.string.pref_disable_webrtc_agc_and_hpf_key);
+ keyprefSpeakerphone = getString(R.string.pref_speakerphone_key);
+
+ keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
+ keyprefOrdered = getString(R.string.pref_ordered_key);
+ keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
+ keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
+ keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
+ keyprefNegotiated = getString(R.string.pref_negotiated_key);
+ keyprefDataId = getString(R.string.pref_data_id_key);
+
+ keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
+ keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyPrefTracing = getString(R.string.pref_tracing_key);
+ keyprefEnabledRtcEventLog = getString(R.string.pref_enable_rtceventlog_key);
+
+ // Display the fragment as the main content.
+ settingsFragment = new SettingsFragment();
+ getFragmentManager()
+ .beginTransaction()
+ .replace(android.R.id.content, settingsFragment)
+ .commit();
+ }
+
+ /**
+ * Registers this activity as the shared-preference change listener, seeds
+ * every preference summary with its current value, and disables preferences
+ * whose feature this device does not support (Camera2, built-in AEC/NS).
+ */
+ @Override
+ protected void onResume() {
+ super.onResume();
+ // Set summary to be the user-description for the selected value
+ SharedPreferences sharedPreferences =
+ settingsFragment.getPreferenceScreen().getSharedPreferences();
+ sharedPreferences.registerOnSharedPreferenceChangeListener(this);
+ updateSummaryB(sharedPreferences, keyprefVideoCall);
+ updateSummaryB(sharedPreferences, keyprefScreencapture);
+ updateSummaryB(sharedPreferences, keyprefCamera2);
+ updateSummary(sharedPreferences, keyprefResolution);
+ updateSummary(sharedPreferences, keyprefFps);
+ updateSummaryB(sharedPreferences, keyprefCaptureQualitySlider);
+ updateSummary(sharedPreferences, keyprefMaxVideoBitrateType);
+ updateSummaryBitrate(sharedPreferences, keyprefMaxVideoBitrateValue);
+ setVideoBitrateEnable(sharedPreferences);
+ updateSummary(sharedPreferences, keyPrefVideoCodec);
+ updateSummaryB(sharedPreferences, keyprefHwCodec);
+ updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
+ updateSummaryB(sharedPreferences, keyprefFlexfec);
+
+ updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
+ updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
+ setAudioBitrateEnable(sharedPreferences);
+ updateSummary(sharedPreferences, keyPrefAudioCodec);
+ updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
+ updateSummaryB(sharedPreferences, keyprefAecDump);
+ updateSummaryB(sharedPreferences, keyprefEnableSaveInputAudioToFile);
+ updateSummaryB(sharedPreferences, keyprefOpenSLES);
+ updateSummaryB(sharedPreferences, keyprefDisableBuiltInAEC);
+ updateSummaryB(sharedPreferences, keyprefDisableBuiltInAGC);
+ updateSummaryB(sharedPreferences, keyprefDisableBuiltInNS);
+ updateSummaryB(sharedPreferences, keyprefDisableWebRtcAGCAndHPF);
+ updateSummaryList(sharedPreferences, keyprefSpeakerphone);
+
+ updateSummaryB(sharedPreferences, keyprefEnableDataChannel);
+ updateSummaryB(sharedPreferences, keyprefOrdered);
+ updateSummary(sharedPreferences, keyprefMaxRetransmitTimeMs);
+ updateSummary(sharedPreferences, keyprefMaxRetransmits);
+ updateSummary(sharedPreferences, keyprefDataProtocol);
+ updateSummaryB(sharedPreferences, keyprefNegotiated);
+ updateSummary(sharedPreferences, keyprefDataId);
+ setDataChannelEnable(sharedPreferences);
+
+ updateSummary(sharedPreferences, keyPrefRoomServerUrl);
+ updateSummaryB(sharedPreferences, keyPrefDisplayHud);
+ updateSummaryB(sharedPreferences, keyPrefTracing);
+ updateSummaryB(sharedPreferences, keyprefEnabledRtcEventLog);
+
+ if (!Camera2Enumerator.isSupported(this)) {
+ Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);
+
+ camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
+ camera2Preference.setEnabled(false);
+ }
+
+ if (!JavaAudioDeviceModule.isBuiltInAcousticEchoCancelerSupported()) {
+ Preference disableBuiltInAECPreference =
+ settingsFragment.findPreference(keyprefDisableBuiltInAEC);
+
+ disableBuiltInAECPreference.setSummary(getString(R.string.pref_built_in_aec_not_available));
+ disableBuiltInAECPreference.setEnabled(false);
+ }
+
+ // NOTE(review): unlike AEC/NS above, the built-in AGC preference is disabled
+ // unconditionally (there is no isBuiltIn...Supported() guard here) —
+ // presumably built-in AGC is never available via the audio device module;
+ // confirm against upstream before relying on this.
+ Preference disableBuiltInAGCPreference =
+ settingsFragment.findPreference(keyprefDisableBuiltInAGC);
+
+ disableBuiltInAGCPreference.setSummary(getString(R.string.pref_built_in_agc_not_available));
+ disableBuiltInAGCPreference.setEnabled(false);
+
+ if (!JavaAudioDeviceModule.isBuiltInNoiseSuppressorSupported()) {
+ Preference disableBuiltInNSPreference =
+ settingsFragment.findPreference(keyprefDisableBuiltInNS);
+
+ disableBuiltInNSPreference.setSummary(getString(R.string.pref_built_in_ns_not_available));
+ disableBuiltInNSPreference.setEnabled(false);
+ }
+ }
+
+ /** Unregisters the change listener installed in onResume(). */
+ @Override
+ protected void onPause() {
+ super.onPause();
+ SharedPreferences sharedPreferences =
+ settingsFragment.getPreferenceScreen().getSharedPreferences();
+ sharedPreferences.unregisterOnSharedPreferenceChangeListener(this);
+ }
+
+ /**
+ * Routes the changed key to the matching summary updater (string, bitrate,
+ * boolean, or list), then refreshes the enabled-state of preferences that
+ * depend on the changed value.
+ */
+ @Override
+ public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
+ // clang-format off
+ if (key.equals(keyprefResolution)
+ || key.equals(keyprefFps)
+ || key.equals(keyprefMaxVideoBitrateType)
+ || key.equals(keyPrefVideoCodec)
+ || key.equals(keyprefStartAudioBitrateType)
+ || key.equals(keyPrefAudioCodec)
+ || key.equals(keyPrefRoomServerUrl)
+ || key.equals(keyprefMaxRetransmitTimeMs)
+ || key.equals(keyprefMaxRetransmits)
+ || key.equals(keyprefDataProtocol)
+ || key.equals(keyprefDataId)) {
+ updateSummary(sharedPreferences, key);
+ } else if (key.equals(keyprefMaxVideoBitrateValue)
+ || key.equals(keyprefStartAudioBitrateValue)) {
+ updateSummaryBitrate(sharedPreferences, key);
+ } else if (key.equals(keyprefVideoCall)
+ || key.equals(keyprefScreencapture)
+ || key.equals(keyprefCamera2)
+ || key.equals(keyPrefTracing)
+ || key.equals(keyprefCaptureQualitySlider)
+ || key.equals(keyprefHwCodec)
+ || key.equals(keyprefCaptureToTexture)
+ || key.equals(keyprefFlexfec)
+ || key.equals(keyprefNoAudioProcessing)
+ || key.equals(keyprefAecDump)
+ || key.equals(keyprefEnableSaveInputAudioToFile)
+ || key.equals(keyprefOpenSLES)
+ || key.equals(keyprefDisableBuiltInAEC)
+ || key.equals(keyprefDisableBuiltInAGC)
+ || key.equals(keyprefDisableBuiltInNS)
+ || key.equals(keyprefDisableWebRtcAGCAndHPF)
+ || key.equals(keyPrefDisplayHud)
+ || key.equals(keyprefEnableDataChannel)
+ || key.equals(keyprefOrdered)
+ || key.equals(keyprefNegotiated)
+ || key.equals(keyprefEnabledRtcEventLog)) {
+ updateSummaryB(sharedPreferences, key);
+ } else if (key.equals(keyprefSpeakerphone)) {
+ updateSummaryList(sharedPreferences, key);
+ }
+ // clang-format on
+ if (key.equals(keyprefMaxVideoBitrateType)) {
+ setVideoBitrateEnable(sharedPreferences);
+ }
+ if (key.equals(keyprefStartAudioBitrateType)) {
+ setAudioBitrateEnable(sharedPreferences);
+ }
+ if (key.equals(keyprefEnableDataChannel)) {
+ setDataChannelEnable(sharedPreferences);
+ }
+ }
+
+ /** Shows the stored string value of {@code key} as the preference's summary. */
+ private void updateSummary(SharedPreferences sharedPreferences, String key) {
+ Preference updatedPref = settingsFragment.findPreference(key);
+ // Set summary to be the user-description for the selected value
+ updatedPref.setSummary(sharedPreferences.getString(key, ""));
+ }
+
+ /** Shows the stored value of {@code key} as the summary with a " kbps" suffix. */
+ private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
+ Preference updatedPref = settingsFragment.findPreference(key);
+ updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
+ }
+
+ /** Shows the stored boolean value of {@code key} (default true) as "enabled"/"disabled". */
+ private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
+ Preference updatedPref = settingsFragment.findPreference(key);
+ updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
+ ? getString(R.string.pref_value_enabled)
+ : getString(R.string.pref_value_disabled));
+ }
+
+ /** Shows a ListPreference's currently selected entry label as its summary. */
+ private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
+ ListPreference updatedPref = (ListPreference) settingsFragment.findPreference(key);
+ updatedPref.setSummary(updatedPref.getEntry());
+ }
+
+ /** Enables the max-video-bitrate value field only when a non-default bitrate type is chosen. */
+ private void setVideoBitrateEnable(SharedPreferences sharedPreferences) {
+ Preference bitratePreferenceValue =
+ settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
+ String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
+ String bitrateType =
+ sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
+ if (bitrateType.equals(bitrateTypeDefault)) {
+ bitratePreferenceValue.setEnabled(false);
+ } else {
+ bitratePreferenceValue.setEnabled(true);
+ }
+ }
+
+ /** Enables the start-audio-bitrate value field only when a non-default bitrate type is chosen. */
+ private void setAudioBitrateEnable(SharedPreferences sharedPreferences) {
+ Preference bitratePreferenceValue =
+ settingsFragment.findPreference(keyprefStartAudioBitrateValue);
+ String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
+ String bitrateType =
+ sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
+ if (bitrateType.equals(bitrateTypeDefault)) {
+ bitratePreferenceValue.setEnabled(false);
+ } else {
+ bitratePreferenceValue.setEnabled(true);
+ }
+ }
+
+ /** Enables/disables every data-channel sub-preference to follow the master toggle. */
+ private void setDataChannelEnable(SharedPreferences sharedPreferences) {
+ boolean enabled = sharedPreferences.getBoolean(keyprefEnableDataChannel, true);
+ settingsFragment.findPreference(keyprefOrdered).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefMaxRetransmitTimeMs).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefMaxRetransmits).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefDataProtocol).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefNegotiated).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefDataId).setEnabled(enabled);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
new file mode 100644
index 0000000000..d969bd7d32
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.Bundle;
+import android.preference.PreferenceFragment;
+
+/**
+ * Settings fragment for AppRTC.
+ */
+public class SettingsFragment extends PreferenceFragment {
+ /** Inflates the preference hierarchy from res/xml/preferences.xml. */
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ // Load the preferences from an XML resource
+ addPreferencesFromResource(R.xml.preferences);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
new file mode 100644
index 0000000000..d869d7ca66
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
@@ -0,0 +1,362 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.util.concurrent.ExecutorService;
+import org.webrtc.ThreadUtils;
+
+/**
+ * Replacement for WebSocketChannelClient for direct communication between two IP addresses. Handles
+ * the signaling between the two clients using a TCP connection.
+ * <p>
+ * All public methods should be called from a looper executor thread
+ * passed in a constructor, otherwise exception will be thrown.
+ * All events are dispatched on the same thread.
+ */
+public class TCPChannelClient {
+ private static final String TAG = "TCPChannelClient";
+
+ // Executor on which all eventListener callbacks are dispatched. Public
+ // methods must be called from this executor's thread; executorThreadCheck
+ // enforces that (it binds to the first calling thread after detachThread()).
+ private final ExecutorService executor;
+ private final ThreadUtils.ThreadChecker executorThreadCheck;
+ private final TCPChannelEvents eventListener;
+ private TCPSocket socket;
+
+ /**
+ * Callback interface for messages delivered on TCP Connection. All callbacks are invoked from the
+ * looper executor thread.
+ */
+ public interface TCPChannelEvents {
+ void onTCPConnected(boolean server);
+ void onTCPMessage(String message);
+ void onTCPError(String description);
+ void onTCPClose();
+ }
+
+ /**
+ * Initializes the TCPChannelClient. If IP is a local IP address, starts a listening server on
+ * that IP. If not, instead connects to the IP.
+ *
+ * @param eventListener Listener that will receive events from the client.
+ * @param ip IP address to listen on or connect to.
+ * @param port Port to listen on or connect to.
+ */
+ public TCPChannelClient(
+ ExecutorService executor, TCPChannelEvents eventListener, String ip, int port) {
+ this.executor = executor;
+ executorThreadCheck = new ThreadUtils.ThreadChecker();
+ executorThreadCheck.detachThread();
+ this.eventListener = eventListener;
+
+ InetAddress address;
+ try {
+ address = InetAddress.getByName(ip);
+ } catch (UnknownHostException e) {
+ reportError("Invalid IP address.");
+ return;
+ }
+
+ // A wildcard ("any") address means we act as the server and wait for a
+ // peer; a concrete address means we connect out to it as a client.
+ if (address.isAnyLocalAddress()) {
+ socket = new TCPSocketServer(address, port);
+ } else {
+ socket = new TCPSocketClient(address, port);
+ }
+
+ // Starts the listening thread (TCPSocket extends Thread).
+ socket.start();
+ }
+
+ /**
+ * Disconnects the client if not already disconnected. This will fire the onTCPClose event.
+ */
+ public void disconnect() {
+ executorThreadCheck.checkIsOnValidThread();
+
+ socket.disconnect();
+ }
+
+ /**
+ * Sends a message on the socket.
+ *
+ * @param message Message to be sent.
+ */
+ public void send(String message) {
+ executorThreadCheck.checkIsOnValidThread();
+
+ socket.send(message);
+ }
+
+ /**
+ * Helper method for firing onTCPError events. Calls onTCPError on the executor thread.
+ */
+ private void reportError(final String message) {
+ Log.e(TAG, "TCP Error: " + message);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onTCPError(message);
+ }
+ });
+ }
+
+ /**
+ * Base class for server and client sockets. Contains a listening thread that will call
+ * eventListener.onTCPMessage on new messages.
+ */
+ private abstract class TCPSocket extends Thread {
+ // Lock for editing out and rawSocket
+ protected final Object rawSocketLock;
+ @Nullable
+ private PrintWriter out;
+ @Nullable
+ private Socket rawSocket;
+
+ /**
+ * Connect to the peer, potentially a slow operation.
+ *
+ * @return Socket connection, null if connection failed.
+ */
+ @Nullable
+ public abstract Socket connect();
+
+ /** Returns true if sockets is a server rawSocket. */
+ public abstract boolean isServer();
+
+ TCPSocket() {
+ rawSocketLock = new Object();
+ }
+
+ /**
+ * The listening thread.
+ */
+ @Override
+ public void run() {
+ Log.d(TAG, "Listening thread started...");
+
+ // Receive connection to temporary variable first, so we don't block.
+ Socket tempSocket = connect();
+ BufferedReader in;
+
+ // NOTE: logged before the null check below, so this line appears even
+ // when connect() failed; the failure case exits a few lines down.
+ Log.d(TAG, "TCP connection established.");
+
+ synchronized (rawSocketLock) {
+ if (rawSocket != null) {
+ Log.e(TAG, "Socket already existed and will be replaced.");
+ }
+
+ rawSocket = tempSocket;
+
+ // Connecting failed, error has already been reported, just exit.
+ if (rawSocket == null) {
+ return;
+ }
+
+ // UTF-8 on both directions; autoflush is handled manually in send().
+ try {
+ out = new PrintWriter(
+ new OutputStreamWriter(rawSocket.getOutputStream(), Charset.forName("UTF-8")), true);
+ in = new BufferedReader(
+ new InputStreamReader(rawSocket.getInputStream(), Charset.forName("UTF-8")));
+ } catch (IOException e) {
+ reportError("Failed to open IO on rawSocket: " + e.getMessage());
+ return;
+ }
+ }
+
+ Log.v(TAG, "Execute onTCPConnected");
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ Log.v(TAG, "Run onTCPConnected");
+ eventListener.onTCPConnected(isServer());
+ }
+ });
+
+ // Blocking read loop: one line per signaling message, dispatched to the
+ // executor. Exits when the stream ends or the socket is closed.
+ while (true) {
+ final String message;
+ try {
+ message = in.readLine();
+ } catch (IOException e) {
+ synchronized (rawSocketLock) {
+ // If socket was closed, this is expected.
+ if (rawSocket == null) {
+ break;
+ }
+ }
+
+ reportError("Failed to read from rawSocket: " + e.getMessage());
+ break;
+ }
+
+ // No data received, rawSocket probably closed.
+ if (message == null) {
+ break;
+ }
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ Log.v(TAG, "Receive: " + message);
+ eventListener.onTCPMessage(message);
+ }
+ });
+ }
+
+ Log.d(TAG, "Receiving thread exiting...");
+
+ // Close the rawSocket if it is still open.
+ disconnect();
+ }
+
+ /** Closes the rawSocket if it is still open. Also fires the onTCPClose event. */
+ public void disconnect() {
+ try {
+ synchronized (rawSocketLock) {
+ if (rawSocket != null) {
+ rawSocket.close();
+ // Nulling rawSocket signals the read loop that a subsequent
+ // IOException is an expected consequence of closing.
+ rawSocket = null;
+ out = null;
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onTCPClose();
+ }
+ });
+ }
+ }
+ } catch (IOException e) {
+ reportError("Failed to close rawSocket: " + e.getMessage());
+ }
+ }
+
+ /**
+ * Sends a message on the socket. Should only be called on the executor thread.
+ */
+ public void send(String message) {
+ Log.v(TAG, "Send: " + message);
+
+ synchronized (rawSocketLock) {
+ if (out == null) {
+ reportError("Sending data on closed socket.");
+ return;
+ }
+
+ // Newline-delimited protocol, matching the readLine() framing above.
+ out.write(message + "\n");
+ out.flush();
+ }
+ }
+ }
+
+ /** Passive side: binds a ServerSocket and waits for a single peer to connect. */
+ private class TCPSocketServer extends TCPSocket {
+ // Server socket is also guarded by rawSocketLock.
+ @Nullable
+ private ServerSocket serverSocket;
+
+ final private InetAddress address;
+ final private int port;
+
+ public TCPSocketServer(InetAddress address, int port) {
+ this.address = address;
+ this.port = port;
+ }
+
+ /** Opens a listening socket and waits for a connection. */
+ @Nullable
+ @Override
+ public Socket connect() {
+ Log.d(TAG, "Listening on [" + address.getHostAddress() + "]:" + Integer.toString(port));
+
+ final ServerSocket tempSocket;
+ try {
+ tempSocket = new ServerSocket(port, 0, address);
+ } catch (IOException e) {
+ reportError("Failed to create server socket: " + e.getMessage());
+ return null;
+ }
+
+ synchronized (rawSocketLock) {
+ if (serverSocket != null) {
+ Log.e(TAG, "Server rawSocket was already listening and new will be opened.");
+ }
+
+ serverSocket = tempSocket;
+ }
+
+ // Blocks until a peer connects or the server socket is closed.
+ try {
+ return tempSocket.accept();
+ } catch (IOException e) {
+ reportError("Failed to receive connection: " + e.getMessage());
+ return null;
+ }
+ }
+
+ /** Closes the listening socket and calls super. */
+ @Override
+ public void disconnect() {
+ try {
+ synchronized (rawSocketLock) {
+ if (serverSocket != null) {
+ serverSocket.close();
+ serverSocket = null;
+ }
+ }
+ } catch (IOException e) {
+ reportError("Failed to close server socket: " + e.getMessage());
+ }
+
+ super.disconnect();
+ }
+
+ @Override
+ public boolean isServer() {
+ return true;
+ }
+ }
+
+ /** Active side: opens an outbound connection to the given address/port. */
+ private class TCPSocketClient extends TCPSocket {
+ final private InetAddress address;
+ final private int port;
+
+ public TCPSocketClient(InetAddress address, int port) {
+ this.address = address;
+ this.port = port;
+ }
+
+ /** Connects to the peer. */
+ @Nullable
+ @Override
+ public Socket connect() {
+ Log.d(TAG, "Connecting to [" + address.getHostAddress() + "]:" + Integer.toString(port));
+
+ try {
+ return new Socket(address, port);
+ } catch (IOException e) {
+ reportError("Failed to connect: " + e.getMessage());
+ return null;
+ }
+ }
+
+ @Override
+ public boolean isServer() {
+ return false;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
new file mode 100644
index 0000000000..b256400119
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.util.Log;
+import android.util.TypedValue;
+import android.widget.ScrollView;
+import android.widget.TextView;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+/**
+ * Singleton helper: install a default unhandled exception handler which shows
+ * an informative dialog and kills the app. Useful for apps whose
+ * error-handling consists of throwing RuntimeExceptions.
+ * NOTE: almost always more useful to
+ * Thread.setDefaultUncaughtExceptionHandler() rather than
+ * Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
+ */
+public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
+ private static final String TAG = "AppRTCMobileActivity";
+ private final Activity activity;
+
+ /** @param activity Activity used as UI-thread anchor and dialog context. */
+ public UnhandledExceptionHandler(final Activity activity) {
+ this.activity = activity;
+ }
+
+ /**
+ * Shows a scrollable alert dialog on the UI thread containing the root-cause
+ * message (title) and the full recursive stack trace (body); dismissing the
+ * dialog kills the process via System.exit(1).
+ */
+ @Override
+ public void uncaughtException(Thread unusedThread, final Throwable e) {
+ activity.runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ String title = "Fatal error: " + getTopLevelCauseMessage(e);
+ String msg = getRecursiveStackTrace(e);
+ TextView errorView = new TextView(activity);
+ errorView.setText(msg);
+ // Small text size so long stack traces fit more lines per screen.
+ errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
+ ScrollView scrollingContainer = new ScrollView(activity);
+ scrollingContainer.addView(errorView);
+ // Also log to logcat in case the dialog cannot be shown/read.
+ Log.e(TAG, title + "\n\n" + msg);
+ DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
+ @Override
+ public void onClick(DialogInterface dialog, int which) {
+ dialog.dismiss();
+ System.exit(1);
+ }
+ };
+ AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+ builder.setTitle(title)
+ .setView(scrollingContainer)
+ .setPositiveButton("Exit", listener)
+ .show();
+ }
+ });
+ }
+
+ // Returns the Message attached to the original Cause of `t`.
+ private static String getTopLevelCauseMessage(Throwable t) {
+ Throwable topLevelCause = t;
+ while (topLevelCause.getCause() != null) {
+ topLevelCause = topLevelCause.getCause();
+ }
+ return topLevelCause.getMessage();
+ }
+
+ // Returns a human-readable String of the stacktrace in `t`, recursively
+ // through all Causes that led to `t`.
+ private static String getRecursiveStackTrace(Throwable t) {
+ StringWriter writer = new StringWriter();
+ t.printStackTrace(new PrintWriter(writer));
+ return writer.toString();
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
new file mode 100644
index 0000000000..5fa410889a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.Handler;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import de.tavendo.autobahn.WebSocket.WebSocketConnectionObserver;
+import de.tavendo.autobahn.WebSocketConnection;
+import de.tavendo.autobahn.WebSocketException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+import org.appspot.apprtc.util.AsyncHttpURLConnection;
+import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+/**
+ * WebSocket client implementation.
+ *
+ * <p>All public methods should be called from a looper executor thread
+ * passed in a constructor, otherwise exception will be thrown.
+ * All events are dispatched on the same thread.
+ */
+public class WebSocketChannelClient {
+ private static final String TAG = "WSChannelRTCClient";
+ // How long disconnect(true) waits for the library's close event, in milliseconds.
+ private static final int CLOSE_TIMEOUT = 1000;
+ private final WebSocketChannelEvents events;
+ // Looper-backed handler; all public methods must run on this handler's thread
+ // (enforced by checkIfCalledOnValidThread()).
+ private final Handler handler;
+ private WebSocketConnection ws;
+ private String wsServerUrl;
+ // Base URL for out-of-band POST/DELETE control requests (see sendWSSMessage()).
+ private String postServerUrl;
+ @Nullable
+ private String roomID;
+ @Nullable
+ private String clientID;
+ // Current connection state; only read and written on the handler's looper thread.
+ private WebSocketConnectionState state;
+ // Do not remove this member variable. If this is removed, the observer gets garbage collected and
+ // this causes test breakages.
+ private WebSocketObserver wsObserver;
+ // Lock/flag pair used to hand the library's onClose() event over to disconnect().
+ private final Object closeEventLock = new Object();
+ private boolean closeEvent;
+ // WebSocket send queue. Messages are added to the queue when WebSocket
+ // client is not registered and are consumed in register() call.
+ private final List<String> wsSendQueue = new ArrayList<>();
+
+ /**
+ * Possible WebSocket connection states.
+ */
+ public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
+
+ /**
+ * Callback interface for messages delivered on WebSocket.
+ * All events are dispatched from a looper executor thread.
+ */
+ public interface WebSocketChannelEvents {
+ void onWebSocketMessage(final String message);
+ void onWebSocketClose();
+ void onWebSocketError(final String description);
+ }
+
+ public WebSocketChannelClient(Handler handler, WebSocketChannelEvents events) {
+ this.handler = handler;
+ this.events = events;
+ roomID = null;
+ clientID = null;
+ state = WebSocketConnectionState.NEW;
+ }
+
+ // Returns the current state of the WebSocket connection.
+ public WebSocketConnectionState getState() {
+ return state;
+ }
+
+ // Opens the WebSocket connection to `wsUrl`; `postUrl` is remembered for
+ // POST/DELETE control messages. No-op (with an error log) unless state is NEW.
+ public void connect(final String wsUrl, final String postUrl) {
+ checkIfCalledOnValidThread();
+ if (state != WebSocketConnectionState.NEW) {
+ Log.e(TAG, "WebSocket is already connected.");
+ return;
+ }
+ wsServerUrl = wsUrl;
+ postServerUrl = postUrl;
+ closeEvent = false;
+
+ Log.d(TAG, "Connecting WebSocket to: " + wsUrl + ". Post URL: " + postUrl);
+ ws = new WebSocketConnection();
+ wsObserver = new WebSocketObserver();
+ try {
+ ws.connect(new URI(wsServerUrl), wsObserver);
+ } catch (URISyntaxException e) {
+ reportError("URI error: " + e.getMessage());
+ } catch (WebSocketException e) {
+ reportError("WebSocket connection error: " + e.getMessage());
+ }
+ }
+
+ // Registers this client in `roomID`. If the socket is not CONNECTED yet, the
+ // IDs are stored and registration is retried from the onOpen() callback.
+ // After a successful register, any messages queued in wsSendQueue are flushed.
+ public void register(final String roomID, final String clientID) {
+ checkIfCalledOnValidThread();
+ this.roomID = roomID;
+ this.clientID = clientID;
+ if (state != WebSocketConnectionState.CONNECTED) {
+ Log.w(TAG, "WebSocket register() in state " + state);
+ return;
+ }
+ Log.d(TAG, "Registering WebSocket for room " + roomID + ". ClientID: " + clientID);
+ JSONObject json = new JSONObject();
+ try {
+ json.put("cmd", "register");
+ json.put("roomid", roomID);
+ json.put("clientid", clientID);
+ Log.d(TAG, "C->WSS: " + json.toString());
+ ws.sendTextMessage(json.toString());
+ state = WebSocketConnectionState.REGISTERED;
+ // Send any previously accumulated messages.
+ for (String sendMessage : wsSendQueue) {
+ send(sendMessage);
+ }
+ wsSendQueue.clear();
+ } catch (JSONException e) {
+ reportError("WebSocket register JSON error: " + e.getMessage());
+ }
+ }
+
+ // Sends `message` to the room. Before registration the message is queued;
+ // once REGISTERED it is wrapped in a {"cmd": "send"} JSON envelope and sent.
+ public void send(String message) {
+ checkIfCalledOnValidThread();
+ switch (state) {
+ case NEW:
+ case CONNECTED:
+ // Store outgoing messages and send them after websocket client
+ // is registered.
+ Log.d(TAG, "WS ACC: " + message);
+ wsSendQueue.add(message);
+ return;
+ case ERROR:
+ case CLOSED:
+ Log.e(TAG, "WebSocket send() in error or closed state : " + message);
+ return;
+ case REGISTERED:
+ JSONObject json = new JSONObject();
+ try {
+ json.put("cmd", "send");
+ json.put("msg", message);
+ message = json.toString();
+ Log.d(TAG, "C->WSS: " + message);
+ ws.sendTextMessage(message);
+ } catch (JSONException e) {
+ reportError("WebSocket send JSON error: " + e.getMessage());
+ }
+ break;
+ }
+ }
+
+ // This call can be used to send WebSocket messages before WebSocket
+ // connection is opened.
+ public void post(String message) {
+ checkIfCalledOnValidThread();
+ sendWSSMessage("POST", message);
+ }
+
+ // Sends "bye"/DELETE if registered, then closes the socket. If `waitForComplete`
+ // is true, blocks up to CLOSE_TIMEOUT ms for the library's close event so it
+ // cannot call back into a looper thread that is about to be destroyed.
+ public void disconnect(boolean waitForComplete) {
+ checkIfCalledOnValidThread();
+ Log.d(TAG, "Disconnect WebSocket. State: " + state);
+ if (state == WebSocketConnectionState.REGISTERED) {
+ // Send "bye" to WebSocket server.
+ send("{\"type\": \"bye\"}");
+ state = WebSocketConnectionState.CONNECTED;
+ // Send http DELETE to http WebSocket server.
+ sendWSSMessage("DELETE", "");
+ }
+ // Close WebSocket in CONNECTED or ERROR states only.
+ if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
+ ws.disconnect();
+ state = WebSocketConnectionState.CLOSED;
+
+ // Wait for websocket close event to prevent websocket library from
+ // sending any pending messages to deleted looper thread.
+ if (waitForComplete) {
+ synchronized (closeEventLock) {
+ while (!closeEvent) {
+ try {
+ closeEventLock.wait(CLOSE_TIMEOUT);
+ // Unconditional break: we wait at most once (CLOSE_TIMEOUT ms),
+ // even if the wakeup was spurious and closeEvent is still false.
+ break;
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Wait error: " + e.toString());
+ }
+ }
+ }
+ }
+ }
+ Log.d(TAG, "Disconnecting WebSocket done.");
+ }
+
+ // Logs `errorMessage` and notifies the listener at most once, moving state to ERROR.
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, errorMessage);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (state != WebSocketConnectionState.ERROR) {
+ state = WebSocketConnectionState.ERROR;
+ events.onWebSocketError(errorMessage);
+ }
+ }
+ });
+ }
+
+ // Asynchronously send POST/DELETE to WebSocket server.
+ private void sendWSSMessage(final String method, final String message) {
+ String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
+ Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
+ AsyncHttpURLConnection httpConnection =
+ new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
+ @Override
+ public void onHttpError(String errorMessage) {
+ reportError("WS " + method + " error: " + errorMessage);
+ }
+
+ @Override
+ public void onHttpComplete(String response) {}
+ });
+ httpConnection.send();
+ }
+
+ // Helper method for debugging purposes. Ensures that WebSocket method is
+ // called on a looper thread.
+ private void checkIfCalledOnValidThread() {
+ if (Thread.currentThread() != handler.getLooper().getThread()) {
+ throw new IllegalStateException("WebSocket method is not called on valid thread");
+ }
+ }
+
+ // Receives autobahn library callbacks (which may arrive on the library's own
+ // thread -- TODO confirm) and re-posts the work onto `handler`'s looper.
+ private class WebSocketObserver implements WebSocketConnectionObserver {
+ @Override
+ public void onOpen() {
+ Log.d(TAG, "WebSocket connection opened to: " + wsServerUrl);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ state = WebSocketConnectionState.CONNECTED;
+ // Check if we have pending register request.
+ if (roomID != null && clientID != null) {
+ register(roomID, clientID);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onClose(WebSocketCloseNotification code, String reason) {
+ Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+ + state);
+ // Wake up any disconnect(true) call waiting for this close event.
+ synchronized (closeEventLock) {
+ closeEvent = true;
+ closeEventLock.notify();
+ }
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (state != WebSocketConnectionState.CLOSED) {
+ state = WebSocketConnectionState.CLOSED;
+ events.onWebSocketClose();
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onTextMessage(String payload) {
+ Log.d(TAG, "WSS->C: " + payload);
+ final String message = payload;
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ // Drop messages that arrive after close/error.
+ if (state == WebSocketConnectionState.CONNECTED
+ || state == WebSocketConnectionState.REGISTERED) {
+ events.onWebSocketMessage(message);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRawTextMessage(byte[] payload) {}
+
+ @Override
+ public void onBinaryMessage(byte[] payload) {}
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
new file mode 100644
index 0000000000..cbfdb21c91
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
@@ -0,0 +1,427 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import org.appspot.apprtc.RoomParametersFetcher.RoomParametersFetcherEvents;
+import org.appspot.apprtc.WebSocketChannelClient.WebSocketChannelEvents;
+import org.appspot.apprtc.WebSocketChannelClient.WebSocketConnectionState;
+import org.appspot.apprtc.util.AsyncHttpURLConnection;
+import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.SessionDescription;
+
+/**
+ * Negotiates signaling for chatting with https://appr.tc "rooms".
+ * Uses the client<->server specifics of the apprtc AppEngine webapp.
+ *
+ * <p>To use: create an instance of this object (registering a message handler) and
+ * call connectToRoom(). Once room connection is established
+ * onConnectedToRoom() callback with room parameters is invoked.
+ * Messages to other party (with local Ice candidates and answer SDP) can
+ * be sent after WebSocket connection is established.
+ */
+public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
+ private static final String TAG = "WSRTCClient";
+ // Path segments of the room server's REST endpoints.
+ private static final String ROOM_JOIN = "join";
+ private static final String ROOM_MESSAGE = "message";
+ private static final String ROOM_LEAVE = "leave";
+
+ private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+ private enum MessageType { MESSAGE, LEAVE }
+
+ // All signaling work runs on this handler's dedicated HandlerThread looper.
+ private final Handler handler;
+ // True if this client initiated the call; the initiator posts candidates/SDP
+ // to the room server (GAE), the receiver sends them over the WebSocket.
+ private boolean initiator;
+ private SignalingEvents events;
+ private WebSocketChannelClient wsClient;
+ private ConnectionState roomState;
+ private RoomConnectionParameters connectionParameters;
+ // Room-server URLs for posting messages / leaving; set in signalingParametersReady().
+ private String messageUrl;
+ private String leaveUrl;
+
+ public WebSocketRTCClient(SignalingEvents events) {
+ this.events = events;
+ roomState = ConnectionState.NEW;
+ final HandlerThread handlerThread = new HandlerThread(TAG);
+ handlerThread.start();
+ handler = new Handler(handlerThread.getLooper());
+ }
+
+ // --------------------------------------------------------------------
+ // AppRTCClient interface implementation.
+ // Asynchronously connect to an AppRTC room URL using supplied connection
+ // parameters, retrieves room parameters and connect to WebSocket server.
+ @Override
+ public void connectToRoom(RoomConnectionParameters connectionParameters) {
+ this.connectionParameters = connectionParameters;
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ connectToRoomInternal();
+ }
+ });
+ }
+
+ // Asynchronously disconnects from the room and quits the signaling looper
+ // thread; this client instance cannot be reused afterwards.
+ @Override
+ public void disconnectFromRoom() {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ disconnectFromRoomInternal();
+ handler.getLooper().quit();
+ }
+ });
+ }
+
+ // Connects to room - function runs on a local looper thread.
+ private void connectToRoomInternal() {
+ String connectionUrl = getConnectionUrl(connectionParameters);
+ Log.d(TAG, "Connect to room: " + connectionUrl);
+ roomState = ConnectionState.NEW;
+ wsClient = new WebSocketChannelClient(handler, this);
+
+ RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
+ @Override
+ public void onSignalingParametersReady(final SignalingParameters params) {
+ WebSocketRTCClient.this.handler.post(new Runnable() {
+ @Override
+ public void run() {
+ WebSocketRTCClient.this.signalingParametersReady(params);
+ }
+ });
+ }
+
+ @Override
+ public void onSignalingParametersError(String description) {
+ WebSocketRTCClient.this.reportError(description);
+ }
+ };
+
+ new RoomParametersFetcher(connectionUrl, null, callbacks).makeRequest();
+ }
+
+ // Disconnect from room and send bye messages - runs on a local looper thread.
+ private void disconnectFromRoomInternal() {
+ Log.d(TAG, "Disconnect. Room state: " + roomState);
+ if (roomState == ConnectionState.CONNECTED) {
+ Log.d(TAG, "Closing room.");
+ sendPostMessage(MessageType.LEAVE, leaveUrl, null);
+ }
+ roomState = ConnectionState.CLOSED;
+ if (wsClient != null) {
+ wsClient.disconnect(true);
+ }
+ }
+
+ // Helper functions to get connection, post message and leave message URLs
+ private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
+ return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId
+ + getQueryString(connectionParameters);
+ }
+
+ private String getMessageUrl(
+ RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+ return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+ + "/" + signalingParameters.clientId + getQueryString(connectionParameters);
+ }
+
+ private String getLeaveUrl(
+ RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+ return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+ + signalingParameters.clientId + getQueryString(connectionParameters);
+ }
+
+ // Returns "?<urlParameters>" or the empty string when no extra parameters are set.
+ private String getQueryString(RoomConnectionParameters connectionParameters) {
+ if (connectionParameters.urlParameters != null) {
+ return "?" + connectionParameters.urlParameters;
+ } else {
+ return "";
+ }
+ }
+
+ // Callback issued when room parameters are extracted. Runs on local
+ // looper thread.
+ private void signalingParametersReady(final SignalingParameters signalingParameters) {
+ Log.d(TAG, "Room connection completed.");
+ if (connectionParameters.loopback
+ && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
+ reportError("Loopback room is busy.");
+ return;
+ }
+ if (!connectionParameters.loopback && !signalingParameters.initiator
+ && signalingParameters.offerSdp == null) {
+ Log.w(TAG, "No offer SDP in room response.");
+ }
+ initiator = signalingParameters.initiator;
+ messageUrl = getMessageUrl(connectionParameters, signalingParameters);
+ leaveUrl = getLeaveUrl(connectionParameters, signalingParameters);
+ Log.d(TAG, "Message URL: " + messageUrl);
+ Log.d(TAG, "Leave URL: " + leaveUrl);
+ roomState = ConnectionState.CONNECTED;
+
+ // Fire connection and signaling parameters events.
+ events.onConnectedToRoom(signalingParameters);
+
+ // Connect and register WebSocket client.
+ wsClient.connect(signalingParameters.wssUrl, signalingParameters.wssPostUrl);
+ wsClient.register(connectionParameters.roomId, signalingParameters.clientId);
+ }
+
+ // Send local offer SDP to the other participant.
+ @Override
+ public void sendOfferSdp(final SessionDescription sdp) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending offer SDP in non connected state.");
+ return;
+ }
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "offer");
+ sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+ if (connectionParameters.loopback) {
+ // In loopback mode rename this offer to answer and route it back.
+ SessionDescription sdpAnswer = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
+ events.onRemoteDescription(sdpAnswer);
+ }
+ }
+ });
+ }
+
+ // Send local answer SDP to the other participant.
+ @Override
+ public void sendAnswerSdp(final SessionDescription sdp) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (connectionParameters.loopback) {
+ Log.e(TAG, "Sending answer in loopback mode.");
+ return;
+ }
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "answer");
+ wsClient.send(json.toString());
+ }
+ });
+ }
+
+ // Send Ice candidate to the other participant.
+ @Override
+ public void sendLocalIceCandidate(final IceCandidate candidate) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "candidate");
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+ if (initiator) {
+ // Call initiator sends ice candidates to GAE server.
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate in non connected state.");
+ return;
+ }
+ sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+ if (connectionParameters.loopback) {
+ events.onRemoteIceCandidate(candidate);
+ }
+ } else {
+ // Call receiver sends ice candidates to websocket server.
+ wsClient.send(json.toString());
+ }
+ }
+ });
+ }
+
+ // Send removed Ice candidates to the other participant.
+ @Override
+ public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "remove-candidates");
+ JSONArray jsonArray = new JSONArray();
+ for (final IceCandidate candidate : candidates) {
+ jsonArray.put(toJsonCandidate(candidate));
+ }
+ jsonPut(json, "candidates", jsonArray);
+ if (initiator) {
+ // Call initiator sends ice candidates to GAE server.
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate removals in non connected state.");
+ return;
+ }
+ sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+ if (connectionParameters.loopback) {
+ events.onRemoteIceCandidatesRemoved(candidates);
+ }
+ } else {
+ // Call receiver sends ice candidates to websocket server.
+ wsClient.send(json.toString());
+ }
+ }
+ });
+ }
+
+ // --------------------------------------------------------------------
+ // WebSocketChannelEvents interface implementation.
+ // All events are called by WebSocketChannelClient on a local looper thread
+ // (passed to WebSocket client constructor).
+ // Parses an incoming signaling message (candidate, remove-candidates,
+ // answer, offer, bye) and dispatches it to `events`.
+ @Override
+ public void onWebSocketMessage(final String msg) {
+ if (wsClient.getState() != WebSocketConnectionState.REGISTERED) {
+ Log.e(TAG, "Got WebSocket message in non registered state.");
+ return;
+ }
+ try {
+ JSONObject json = new JSONObject(msg);
+ String msgText = json.getString("msg");
+ String errorText = json.optString("error");
+ if (msgText.length() > 0) {
+ // The actual signaling payload is a JSON document nested in "msg".
+ json = new JSONObject(msgText);
+ String type = json.optString("type");
+ if (type.equals("candidate")) {
+ events.onRemoteIceCandidate(toJavaCandidate(json));
+ } else if (type.equals("remove-candidates")) {
+ JSONArray candidateArray = json.getJSONArray("candidates");
+ IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
+ for (int i = 0; i < candidateArray.length(); ++i) {
+ candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
+ }
+ events.onRemoteIceCandidatesRemoved(candidates);
+ } else if (type.equals("answer")) {
+ if (initiator) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+ events.onRemoteDescription(sdp);
+ } else {
+ reportError("Received answer for call initiator: " + msg);
+ }
+ } else if (type.equals("offer")) {
+ if (!initiator) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+ events.onRemoteDescription(sdp);
+ } else {
+ reportError("Received offer for call receiver: " + msg);
+ }
+ } else if (type.equals("bye")) {
+ events.onChannelClose();
+ } else {
+ reportError("Unexpected WebSocket message: " + msg);
+ }
+ } else {
+ if (errorText != null && errorText.length() > 0) {
+ reportError("WebSocket error message: " + errorText);
+ } else {
+ reportError("Unexpected WebSocket message: " + msg);
+ }
+ }
+ } catch (JSONException e) {
+ reportError("WebSocket message JSON parsing error: " + e.toString());
+ }
+ }
+
+ @Override
+ public void onWebSocketClose() {
+ events.onChannelClose();
+ }
+
+ @Override
+ public void onWebSocketError(String description) {
+ reportError("WebSocket error: " + description);
+ }
+
+ // --------------------------------------------------------------------
+ // Helper functions.
+ // Logs `errorMessage` and notifies the listener at most once, moving roomState to ERROR.
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, errorMessage);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.ERROR) {
+ roomState = ConnectionState.ERROR;
+ events.onChannelError(errorMessage);
+ }
+ }
+ });
+ }
+
+ // Put a `key`->`value` mapping in `json`.
+ private static void jsonPut(JSONObject json, String key, Object value) {
+ try {
+ json.put(key, value);
+ } catch (JSONException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ // Send SDP or ICE candidate to a room server.
+ private void sendPostMessage(
+ final MessageType messageType, final String url, @Nullable final String message) {
+ String logInfo = url;
+ if (message != null) {
+ logInfo += ". Message: " + message;
+ }
+ Log.d(TAG, "C->GAE: " + logInfo);
+ AsyncHttpURLConnection httpConnection =
+ new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
+ @Override
+ public void onHttpError(String errorMessage) {
+ reportError("GAE POST error: " + errorMessage);
+ }
+
+ @Override
+ public void onHttpComplete(String response) {
+ // Only MESSAGE responses carry a {"result": ...} body worth checking.
+ if (messageType == MessageType.MESSAGE) {
+ try {
+ JSONObject roomJson = new JSONObject(response);
+ String result = roomJson.getString("result");
+ if (!result.equals("SUCCESS")) {
+ reportError("GAE POST error: " + result);
+ }
+ } catch (JSONException e) {
+ reportError("GAE POST JSON error: " + e.toString());
+ }
+ }
+ }
+ });
+ httpConnection.send();
+ }
+
+ // Converts a Java candidate to a JSONObject.
+ private JSONObject toJsonCandidate(final IceCandidate candidate) {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+ return json;
+ }
+
+ // Converts a JSON candidate to a Java object.
+ IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
+ return new IceCandidate(
+ json.getString("id"), json.getInt("label"), json.getString("candidate"));
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
new file mode 100644
index 0000000000..ee7f8c0416
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc.util;
+
+import android.os.Build;
+import android.util.Log;
+
+/**
+ * AppRTCUtils provides helper functions for managing thread safety.
+ */
+public final class AppRTCUtils {
+ // Non-instantiable utility class.
+ private AppRTCUtils() {}
+
+ /**
+ * Helper method which throws an exception when an assertion has failed.
+ * Unlike the {@code assert} keyword, this check cannot be disabled at runtime.
+ */
+ public static void assertIsTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ /** Helper method for building a string of thread information. */
+ public static String getThreadInfo() {
+ return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ + "]";
+ }
+
+ /** Information about the current build, taken from system properties. */
+ public static void logDeviceInfo(String tag) {
+ Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ + "Release: " + Build.VERSION.RELEASE + ", "
+ + "Brand: " + Build.BRAND + ", "
+ + "Device: " + Build.DEVICE + ", "
+ + "Id: " + Build.ID + ", "
+ + "Hardware: " + Build.HARDWARE + ", "
+ + "Manufacturer: " + Build.MANUFACTURER + ", "
+ + "Model: " + Build.MODEL + ", "
+ + "Product: " + Build.PRODUCT);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
new file mode 100644
index 0000000000..93028ae783
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.SocketTimeoutException;
+import java.net.URL;
+import java.util.Scanner;
+
+/**
+ * Asynchronous http requests implementation.
+ */
+public class AsyncHttpURLConnection {
+ // Connect and read timeout for the request, in milliseconds.
+ private static final int HTTP_TIMEOUT_MS = 8000;
+ private static final String HTTP_ORIGIN = "https://appr.tc";
+ private final String method;
+ private final String url;
+ // Request body; may be null for body-less requests.
+ private final String message;
+ private final AsyncHttpEvents events;
+ // Optional Content-Type override; defaults to "text/plain; charset=utf-8".
+ private String contentType;
+
+ /**
+ * Http requests callbacks.
+ */
+ public interface AsyncHttpEvents {
+ void onHttpError(String errorMessage);
+ void onHttpComplete(String response);
+ }
+
+ public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
+ this.method = method;
+ this.url = url;
+ this.message = message;
+ this.events = events;
+ }
+
+ /** Overrides the default "text/plain; charset=utf-8" Content-Type header. */
+ public void setContentType(String contentType) {
+ this.contentType = contentType;
+ }
+
+ /** Runs the request on a new background thread; callbacks fire on that thread. */
+ public void send() {
+ new Thread(this ::sendHttpMessage).start();
+ }
+
+ @SuppressWarnings("UseNetworkAnnotations")
+ private void sendHttpMessage() {
+ try {
+ HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
+ byte[] postData = new byte[0];
+ if (message != null) {
+ postData = message.getBytes("UTF-8");
+ }
+ connection.setRequestMethod(method);
+ connection.setUseCaches(false);
+ connection.setDoInput(true);
+ connection.setConnectTimeout(HTTP_TIMEOUT_MS);
+ connection.setReadTimeout(HTTP_TIMEOUT_MS);
+ // TODO(glaznev) - query request origin from pref_room_server_url_key preferences.
+ connection.addRequestProperty("origin", HTTP_ORIGIN);
+ boolean doOutput = false;
+ if (method.equals("POST")) {
+ doOutput = true;
+ connection.setDoOutput(true);
+ // Known body length lets the connection stream without internal buffering.
+ connection.setFixedLengthStreamingMode(postData.length);
+ }
+ if (contentType == null) {
+ connection.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
+ } else {
+ connection.setRequestProperty("Content-Type", contentType);
+ }
+
+ // Send POST request.
+ if (doOutput && postData.length > 0) {
+ OutputStream outStream = connection.getOutputStream();
+ outStream.write(postData);
+ outStream.close();
+ }
+
+ // Get response.
+ int responseCode = connection.getResponseCode();
+ if (responseCode != 200) {
+ events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+ + connection.getHeaderField(null));
+ connection.disconnect();
+ return;
+ }
+ InputStream responseStream = connection.getInputStream();
+ String response = drainStream(responseStream);
+ responseStream.close();
+ connection.disconnect();
+ events.onHttpComplete(response);
+ } catch (SocketTimeoutException e) {
+ events.onHttpError("HTTP " + method + " to " + url + " timeout");
+ } catch (IOException e) {
+ events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
+ }
+ }
+
+ // Return the contents of an InputStream as a String.
+ private static String drainStream(InputStream in) {
+ // The "\\A" delimiter makes the Scanner return the whole stream as one token.
+ Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
+ return s.hasNext() ? s.next() : "";
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py b/third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py
new file mode 100644
index 0000000000..b1cf84611f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+from optparse import OptionParser
+import random
+import string
+import subprocess
+import sys
+import time
+
+from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
+
+
+# NOTE(review): this script uses Python 2 syntax (print statements, xrange)
+# and is intended to run under monkeyrunner's Jython interpreter only.
+def main():
+ parser = OptionParser()
+
+ parser.add_option('--devname', dest='devname', help='The device id')
+
+ parser.add_option(
+ '--videooutsave',
+ dest='videooutsave',
+ help='The path where to save the video out file on local computer')
+
+ parser.add_option('--videoout',
+ dest='videoout',
+ help='The path where to put the video out file')
+
+ parser.add_option('--videoout_width',
+ dest='videoout_width',
+ type='int',
+ help='The width for the video out file')
+
+ parser.add_option('--videoout_height',
+ dest='videoout_height',
+ type='int',
+ help='The height for the video out file')
+
+ parser.add_option(
+ '--videoin',
+ dest='videoin',
+ help='The path where to read input file instead of camera')
+
+ parser.add_option('--call_length',
+ dest='call_length',
+ type='int',
+ help='The length of the call')
+
+ (options, args) = parser.parse_args()
+
+ print(options, args)
+
+ devname = options.devname
+
+ videoin = options.videoin
+
+ videoout = options.videoout
+ videoout_width = options.videoout_width
+ videoout_height = options.videoout_height
+
+ videooutsave = options.videooutsave
+
+ # Call length defaults to 10 seconds when not specified.
+ call_length = options.call_length or 10
+
+ # Random 8-character room name for this run.
+ room = ''.join(
+ random.choice(string.ascii_letters + string.digits) for _ in range(8))
+
+ # Delete output video file.
+ if videoout:
+ subprocess.check_call(
+ ['adb', '-s', devname, 'shell', 'rm', '-f', videoout])
+
+ # Connect to the device via monkeyrunner (timeout in seconds, then device id).
+ device = MonkeyRunner.waitForConnection(2, devname)
+
+ # Intent extras consumed by org.appspot.apprtc.ConnectActivity (see
+ # component in startActivity below).
+ extras = {
+ 'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True,
+ 'org.appspot.apprtc.AUDIOCODEC': 'OPUS',
+ 'org.appspot.apprtc.LOOPBACK': True,
+ 'org.appspot.apprtc.VIDEOCODEC': 'VP8',
+ 'org.appspot.apprtc.CAPTURETOTEXTURE': False,
+ 'org.appspot.apprtc.CAMERA2': False,
+ 'org.appspot.apprtc.ROOMID': room
+ }
+
+ if videoin:
+ extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin})
+
+ if videoout:
+ extras.update({
+ 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE':
+ videoout,
+ 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH':
+ videoout_width,
+ 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT':
+ videoout_height
+ })
+
+ print extras
+
+ device.startActivity(data='https://appr.tc',
+ action='android.intent.action.VIEW',
+ component='org.appspot.apprtc/.ConnectActivity',
+ extras=extras)
+
+ print 'Running a call for %d seconds' % call_length
+ for _ in xrange(call_length):
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ time.sleep(1)
+ print '\nEnding call.'
+
+ # Press back to end the call. Will end on both sides.
+ device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)
+
+ if videooutsave:
+ # Presumably gives the app time to finish writing the file before
+ # pulling it -- TODO confirm.
+ time.sleep(2)
+
+ subprocess.check_call(
+ ['adb', '-s', devname, 'pull', videoout, videooutsave])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn
new file mode 100644
index 0000000000..b671239bae
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+if (is_android) {
+ import("//build/config/android/rules.gni")
+
+ android_java_prebuilt("autobanh_java") {
+ jar_path = "lib/autobanh.jar"
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE
new file mode 100644
index 0000000000..f433b1a53f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md
new file mode 100644
index 0000000000..2079e90d6b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Cameron Lowell Palmer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE
new file mode 100644
index 0000000000..91ed7dfe0e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE
@@ -0,0 +1,3 @@
+AutobahnAndroid
+Copyright 2011,2012 Tavendo GmbH. Licensed under Apache 2.0
+This product includes software developed at Tavendo GmbH http://www.tavendo.de
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar
new file mode 100644
index 0000000000..5a10b7f3f1
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar
Binary files differ