summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/examples
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /third_party/libwebrtc/examples
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/examples')
-rw-r--r--third_party/libwebrtc/examples/BUILD.gn966
-rw-r--r--third_party/libwebrtc/examples/DEPS13
-rw-r--r--third_party/libwebrtc/examples/OWNERS4
-rw-r--r--third_party/libwebrtc/examples/aarproject/.gitignore16
-rw-r--r--third_party/libwebrtc/examples/aarproject/OWNERS1
-rw-r--r--third_party/libwebrtc/examples/aarproject/app/.gitignore1
-rw-r--r--third_party/libwebrtc/examples/aarproject/app/build.gradle53
-rw-r--r--third_party/libwebrtc/examples/aarproject/app/proguard-rules.pro25
-rw-r--r--third_party/libwebrtc/examples/aarproject/build.gradle27
-rw-r--r--third_party/libwebrtc/examples/aarproject/gradle.properties22
-rw-r--r--third_party/libwebrtc/examples/aarproject/local.properties2
-rw-r--r--third_party/libwebrtc/examples/aarproject/settings.gradle1
-rw-r--r--third_party/libwebrtc/examples/androidapp/AndroidManifest.xml60
-rw-r--r--third_party/libwebrtc/examples/androidapp/OWNERS2
-rw-r--r--third_party/libwebrtc/examples/androidapp/README23
-rw-r--r--third_party/libwebrtc/examples/androidapp/ant.properties17
-rw-r--r--third_party/libwebrtc/examples/androidapp/build.xml92
-rw-r--r--third_party/libwebrtc/examples/androidapp/project.properties16
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.pngbin0 -> 587 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.pngbin0 -> 663 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.pngbin0 -> 2486 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.pngbin0 -> 461 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.pngbin0 -> 477 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.pngbin0 -> 2502 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.pngbin0 -> 461 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.pngbin0 -> 477 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.pngbin0 -> 1700 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.pngbin0 -> 1404 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.pngbin0 -> 743 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.pngbin0 -> 761 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.pngbin0 -> 3364 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.pngbin0 -> 1859 bytes
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml34
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml80
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml77
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml27
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml13
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml10
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml4
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values/arrays.xml61
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/values/strings.xml224
-rw-r--r--third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml247
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java594
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java532
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java137
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java158
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java962
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java137
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java110
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java666
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java521
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java346
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java102
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java1400
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java143
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java226
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java73
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java317
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java26
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java362
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java85
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java296
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java427
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java47
-rw-r--r--third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java115
-rw-r--r--third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py127
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn15
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE177
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md21
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE3
-rw-r--r--third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jarbin0 -> 45472 bytes
-rw-r--r--third_party/libwebrtc/examples/androidjunit/OWNERS1
-rw-r--r--third_party/libwebrtc/examples/androidjunit/README8
-rw-r--r--third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java268
-rw-r--r--third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java155
-rw-r--r--third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java199
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/AndroidManifest.xml23
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/BUILD.gn80
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/DEPS5
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/OWNERS1
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/CallClient.java72
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/MainActivity.java120
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.cc293
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.h76
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/jni/onload.cc30
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/res/layout/activity_main.xml52
-rw-r--r--third_party/libwebrtc/examples/androidnativeapi/res/values/strings.xml5
-rw-r--r--third_party/libwebrtc/examples/androidtests/AndroidManifest.xml26
-rw-r--r--third_party/libwebrtc/examples/androidtests/OWNERS1
-rw-r--r--third_party/libwebrtc/examples/androidtests/README14
-rw-r--r--third_party/libwebrtc/examples/androidtests/ant.properties18
-rw-r--r--third_party/libwebrtc/examples/androidtests/build.xml92
-rw-r--r--third_party/libwebrtc/examples/androidtests/project.properties16
-rw-r--r--third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java637
-rw-r--r--third_party/libwebrtc/examples/androidtests/third_party/.gitignore3
-rw-r--r--third_party/libwebrtc/examples/androidtests/third_party/README.webrtc10
-rw-r--r--third_party/libwebrtc/examples/androidvoip/AndroidManifest.xml38
-rw-r--r--third_party/libwebrtc/examples/androidvoip/BUILD.gn95
-rw-r--r--third_party/libwebrtc/examples/androidvoip/DEPS3
-rw-r--r--third_party/libwebrtc/examples/androidvoip/OWNERS2
-rw-r--r--third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java341
-rw-r--r--third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java26
-rw-r--r--third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java191
-rw-r--r--third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.cc514
-rw-r--r--third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.h188
-rw-r--r--third_party/libwebrtc/examples/androidvoip/jni/onload.cc28
-rw-r--r--third_party/libwebrtc/examples/androidvoip/res/layout/activity_main.xml303
-rw-r--r--third_party/libwebrtc/examples/androidvoip/res/values/colors.xml5
-rw-r--r--third_party/libwebrtc/examples/androidvoip/res/values/strings.xml19
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h52
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h87
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m899
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h14
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m175
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h26
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m116
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h18
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m52
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h23
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h32
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m82
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h17
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h26
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m46
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h32
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h21
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h123
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m211
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h52
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m115
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h48
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h58
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m160
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h26
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m36
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h17
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h23
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m86
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h40
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m252
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h23
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m100
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h18
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m25
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h20
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m36
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h35
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m126
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h17
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m56
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h42
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m45
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h30
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m196
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h14
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m263
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h37
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m361
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h21
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m50
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h47
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m213
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h28
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m250
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist109
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h18
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m37
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h18
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m31
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h24
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m130
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h17
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m107
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist39
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist33
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m20
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttfbin0 -> 126072 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4bin0 -> 546651 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.pngbin0 -> 3640 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.pngbin0 -> 4856 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.pngbin0 -> 11152 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.pngbin0 -> 316 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.pngbin0 -> 479 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.pngbin0 -> 257 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.pngbin0 -> 360 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.pngbin0 -> 322 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.pngbin0 -> 557 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.pngbin0 -> 285 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.pngbin0 -> 570 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.pngbin0 -> 242 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.pngbin0 -> 311 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3bin0 -> 893658 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h14
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m55
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h17
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m407
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist33
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m22
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm266
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm62
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm96
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm21
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE15
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h135
-rw-r--r--third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m1774
-rw-r--r--third_party/libwebrtc/examples/objc/Icon-120.pngbin0 -> 8133 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/Icon-180.pngbin0 -> 12502 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/Icon.pngbin0 -> 62469 bytes
-rw-r--r--third_party/libwebrtc/examples/objc/README3
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/Info.plist45
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h17
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m63
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h15
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm154
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/main.m18
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h82
-rw-r--r--third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm238
-rw-r--r--third_party/libwebrtc/examples/peerconnection/OWNERS1
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/conductor.cc614
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/conductor.h136
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/defaults.cc59
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/defaults.h29
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/flag_defs.h52
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/linux/main.cc121
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.cc545
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.h128
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/main.cc133
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/main_wnd.cc633
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/main_wnd.h206
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.cc493
-rw-r--r--third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.h130
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/data_socket.cc299
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/data_socket.h152
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/main.cc193
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/peer_channel.cc360
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/peer_channel.h118
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/server_test.html237
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/utils.cc25
-rw-r--r--third_party/libwebrtc/examples/peerconnection/server/utils.h25
-rw-r--r--third_party/libwebrtc/examples/stunprober/main.cc146
-rw-r--r--third_party/libwebrtc/examples/stunserver/stunserver_main.cc49
-rw-r--r--third_party/libwebrtc/examples/turnserver/read_auth_file.cc37
-rw-r--r--third_party/libwebrtc/examples/turnserver/read_auth_file.h24
-rw-r--r--third_party/libwebrtc/examples/turnserver/read_auth_file_unittest.cc45
-rw-r--r--third_party/libwebrtc/examples/turnserver/turnserver_main.cc101
-rw-r--r--third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION33
-rw-r--r--third_party/libwebrtc/examples/unityplugin/DEPS4
-rw-r--r--third_party/libwebrtc/examples/unityplugin/README309
-rw-r--r--third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc88
-rw-r--r--third_party/libwebrtc/examples/unityplugin/class_reference_holder.h38
-rw-r--r--third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java68
-rw-r--r--third_party/libwebrtc/examples/unityplugin/jni_onload.cc42
-rw-r--r--third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc586
-rw-r--r--third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h135
-rw-r--r--third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc196
-rw-r--r--third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h108
-rw-r--r--third_party/libwebrtc/examples/unityplugin/video_observer.cc44
-rw-r--r--third_party/libwebrtc/examples/unityplugin/video_observer.h35
264 files changed, 30131 insertions, 0 deletions
diff --git a/third_party/libwebrtc/examples/BUILD.gn b/third_party/libwebrtc/examples/BUILD.gn
new file mode 100644
index 0000000000..458205cea7
--- /dev/null
+++ b/third_party/libwebrtc/examples/BUILD.gn
@@ -0,0 +1,966 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../webrtc.gni")
+
+if (is_android) {
+ import("//build/config/android/config.gni")
+ import("//build/config/android/rules.gni")
+ import("//third_party/jni_zero/jni_zero.gni")
+} else if (is_mac) {
+ import("//build/config/mac/rules.gni")
+} else if (is_ios) {
+ import("//build/config/ios/rules.gni")
+} else if (is_linux || is_chromeos) {
+ import("//build/config/linux/pkg_config.gni")
+}
+
+group("examples") {
+ # This target shall build all targets in examples.
+ testonly = true
+ deps = []
+
+ if (is_android) {
+ deps += [
+ ":AppRTCMobile",
+ ":AppRTCMobile_test_apk",
+
+ #TODO(https://bugs.webrtc.org/15095) - Fix or remove this target.
+ #":libwebrtc_unity",
+ "androidvoip",
+ ]
+
+ # TODO(sakal): We include some code from the tests. Remove this dependency
+ # and remove this if-clause.
+ if (rtc_include_tests) {
+ deps += [ "androidnativeapi" ]
+ }
+ }
+
+ if (!build_with_chromium) {
+ deps += [ ":stun_prober" ]
+ }
+
+ if (is_ios || (is_mac && target_cpu != "x86")) {
+ deps += [ ":AppRTCMobile" ]
+ }
+
+ if (is_linux || is_chromeos || is_win) {
+ deps += [
+ ":peerconnection_server",
+ ":stunserver",
+ ":turnserver",
+ ]
+ if (current_os != "winuwp") {
+ deps += [ ":peerconnection_client" ]
+ }
+ }
+
+ if (is_android || is_win) {
+ deps += [ ":webrtc_unity_plugin" ]
+ }
+}
+
+rtc_library("read_auth_file") {
+ testonly = true
+ sources = [
+ "turnserver/read_auth_file.cc",
+ "turnserver/read_auth_file.h",
+ ]
+ deps = [
+ "../api:array_view",
+ "../rtc_base:stringutils",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
+if (rtc_include_tests) {
+ rtc_test("examples_unittests") {
+ testonly = true
+ sources = [ "turnserver/read_auth_file_unittest.cc" ]
+ deps = [
+ ":read_auth_file",
+ "../test:test_main",
+ "//test:test_support",
+ "//testing/gtest",
+ ]
+ }
+}
+
+if (is_android) {
+ rtc_android_apk("AppRTCMobile") {
+ testonly = true
+ apk_name = "AppRTCMobile"
+ android_manifest = "androidapp/AndroidManifest.xml"
+ min_sdk_version = 21
+ target_sdk_version = 31
+
+ deps = [
+ ":AppRTCMobile_javalib",
+ ":AppRTCMobile_resources",
+ "../rtc_base:base_java",
+ ]
+
+ shared_libraries = [ "../sdk/android:libjingle_peerconnection_so" ]
+ }
+
+ rtc_android_library("AppRTCMobile_javalib") {
+ testonly = true
+ android_manifest = "androidapp/AndroidManifest.xml"
+
+ sources = [
+ "androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java",
+ "androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java",
+ "androidapp/src/org/appspot/apprtc/AppRTCClient.java",
+ "androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java",
+ "androidapp/src/org/appspot/apprtc/CallActivity.java",
+ "androidapp/src/org/appspot/apprtc/CallFragment.java",
+ "androidapp/src/org/appspot/apprtc/CaptureQualityController.java",
+ "androidapp/src/org/appspot/apprtc/ConnectActivity.java",
+ "androidapp/src/org/appspot/apprtc/CpuMonitor.java",
+ "androidapp/src/org/appspot/apprtc/DirectRTCClient.java",
+ "androidapp/src/org/appspot/apprtc/HudFragment.java",
+ "androidapp/src/org/appspot/apprtc/PeerConnectionClient.java",
+ "androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java",
+ "androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java",
+ "androidapp/src/org/appspot/apprtc/RtcEventLog.java",
+ "androidapp/src/org/appspot/apprtc/SettingsActivity.java",
+ "androidapp/src/org/appspot/apprtc/SettingsFragment.java",
+ "androidapp/src/org/appspot/apprtc/TCPChannelClient.java",
+ "androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java",
+ "androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java",
+ "androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java",
+ "androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java",
+ "androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java",
+ ]
+
+ resources_package = "org.appspot.apprtc"
+ deps = [
+ ":AppRTCMobile_resources",
+ "../rtc_base:base_java",
+ "../sdk/android:audio_api_java",
+ "../sdk/android:base_java",
+ "../sdk/android:camera_java",
+ "../sdk/android:default_video_codec_factory_java",
+ "../sdk/android:filevideo_java",
+ "../sdk/android:hwcodecs_java",
+ "../sdk/android:java_audio_device_module_java",
+ "../sdk/android:libjingle_peerconnection_java",
+ "../sdk/android:libjingle_peerconnection_metrics_default_java",
+ "../sdk/android:peerconnection_java",
+ "../sdk/android:screencapturer_java",
+ "../sdk/android:surfaceviewrenderer_java",
+ "../sdk/android:swcodecs_java",
+ "../sdk/android:video_api_java",
+ "../sdk/android:video_java",
+ "androidapp/third_party/autobanh:autobanh_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ android_resources("AppRTCMobile_resources") {
+ testonly = true
+ sources = [
+ "androidapp/res/drawable-hdpi/disconnect.png",
+ "androidapp/res/drawable-hdpi/ic_action_full_screen.png",
+ "androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png",
+ "androidapp/res/drawable-hdpi/ic_launcher.png",
+ "androidapp/res/drawable-hdpi/ic_loopback_call.png",
+ "androidapp/res/drawable-ldpi/disconnect.png",
+ "androidapp/res/drawable-ldpi/ic_action_full_screen.png",
+ "androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png",
+ "androidapp/res/drawable-ldpi/ic_launcher.png",
+ "androidapp/res/drawable-ldpi/ic_loopback_call.png",
+ "androidapp/res/drawable-mdpi/disconnect.png",
+ "androidapp/res/drawable-mdpi/ic_action_full_screen.png",
+ "androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png",
+ "androidapp/res/drawable-mdpi/ic_launcher.png",
+ "androidapp/res/drawable-mdpi/ic_loopback_call.png",
+ "androidapp/res/drawable-xhdpi/disconnect.png",
+ "androidapp/res/drawable-xhdpi/ic_action_full_screen.png",
+ "androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png",
+ "androidapp/res/drawable-xhdpi/ic_launcher.png",
+ "androidapp/res/drawable-xhdpi/ic_loopback_call.png",
+ "androidapp/res/layout/activity_call.xml",
+ "androidapp/res/layout/activity_connect.xml",
+ "androidapp/res/layout/fragment_call.xml",
+ "androidapp/res/layout/fragment_hud.xml",
+ "androidapp/res/menu/connect_menu.xml",
+ "androidapp/res/values-v17/styles.xml",
+ "androidapp/res/values-v21/styles.xml",
+ "androidapp/res/values/arrays.xml",
+ "androidapp/res/values/strings.xml",
+ "androidapp/res/xml/preferences.xml",
+ ]
+
+ # Needed for Bazel converter.
+ custom_package = "org.appspot.apprtc"
+ resource_dirs = [ "androidapp/res" ]
+ assert(resource_dirs != []) # Mark as used.
+ }
+
+ rtc_instrumentation_test_apk("AppRTCMobile_test_apk") {
+ apk_name = "AppRTCMobileTest"
+ android_manifest = "androidtests/AndroidManifest.xml"
+ min_sdk_version = 21
+ target_sdk_version = 31
+
+ sources = [
+ "androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java",
+ ]
+
+ apk_under_test = ":AppRTCMobile"
+
+ deps = [
+ ":AppRTCMobile_javalib",
+ "../sdk/android:base_java",
+ "../sdk/android:camera_java",
+ "../sdk/android:libjingle_peerconnection_java",
+ "../sdk/android:peerconnection_java",
+ "../sdk/android:video_api_java",
+ "../sdk/android:video_java",
+ "//third_party/androidx:androidx_test_monitor_java",
+ "//third_party/androidx:androidx_test_runner_java",
+ "//third_party/junit",
+ ]
+ }
+}
+
+if (is_ios || (is_mac && target_cpu != "x86")) {
+ config("apprtc_common_config") {
+ include_dirs = [ "objc/AppRTCMobile/common" ]
+ }
+
+ rtc_library("apprtc_common") {
+ testonly = true
+ sources = [
+ "objc/AppRTCMobile/common/ARDUtilities.h",
+ "objc/AppRTCMobile/common/ARDUtilities.m",
+ ]
+ public_configs = [ ":apprtc_common_config" ]
+ deps = [ "../sdk:base_objc" ]
+ }
+
+ config("apprtc_signaling_config") {
+ include_dirs = [ "objc/AppRTCMobile" ]
+ }
+
+ rtc_library("apprtc_signaling") {
+ testonly = true
+ sources = [
+ "objc/AppRTCMobile/ARDAppClient+Internal.h",
+ "objc/AppRTCMobile/ARDAppClient.h",
+ "objc/AppRTCMobile/ARDAppClient.m",
+ "objc/AppRTCMobile/ARDAppEngineClient.h",
+ "objc/AppRTCMobile/ARDAppEngineClient.m",
+ "objc/AppRTCMobile/ARDCaptureController.h",
+ "objc/AppRTCMobile/ARDCaptureController.m",
+ "objc/AppRTCMobile/ARDExternalSampleCapturer.h",
+ "objc/AppRTCMobile/ARDExternalSampleCapturer.m",
+ "objc/AppRTCMobile/ARDJoinResponse+Internal.h",
+ "objc/AppRTCMobile/ARDJoinResponse.h",
+ "objc/AppRTCMobile/ARDJoinResponse.m",
+ "objc/AppRTCMobile/ARDMessageResponse+Internal.h",
+ "objc/AppRTCMobile/ARDMessageResponse.h",
+ "objc/AppRTCMobile/ARDMessageResponse.m",
+ "objc/AppRTCMobile/ARDRoomServerClient.h",
+ "objc/AppRTCMobile/ARDSettingsModel+Private.h",
+ "objc/AppRTCMobile/ARDSettingsModel.h",
+ "objc/AppRTCMobile/ARDSettingsModel.m",
+ "objc/AppRTCMobile/ARDSettingsStore.h",
+ "objc/AppRTCMobile/ARDSettingsStore.m",
+ "objc/AppRTCMobile/ARDSignalingChannel.h",
+ "objc/AppRTCMobile/ARDSignalingMessage.h",
+ "objc/AppRTCMobile/ARDSignalingMessage.m",
+ "objc/AppRTCMobile/ARDStatsBuilder.h",
+ "objc/AppRTCMobile/ARDStatsBuilder.m",
+ "objc/AppRTCMobile/ARDTURNClient+Internal.h",
+ "objc/AppRTCMobile/ARDTURNClient.h",
+ "objc/AppRTCMobile/ARDTURNClient.m",
+ "objc/AppRTCMobile/ARDWebSocketChannel.h",
+ "objc/AppRTCMobile/ARDWebSocketChannel.m",
+ "objc/AppRTCMobile/RTCIceCandidate+JSON.h",
+ "objc/AppRTCMobile/RTCIceCandidate+JSON.m",
+ "objc/AppRTCMobile/RTCIceServer+JSON.h",
+ "objc/AppRTCMobile/RTCIceServer+JSON.m",
+ "objc/AppRTCMobile/RTCSessionDescription+JSON.h",
+ "objc/AppRTCMobile/RTCSessionDescription+JSON.m",
+ ]
+ public_configs = [ ":apprtc_signaling_config" ]
+ deps = [
+ ":apprtc_common",
+ ":socketrocket",
+ "../sdk:base_objc",
+ "../sdk:default_codec_factory_objc",
+ "../sdk:file_logger_objc",
+ "../sdk:helpers_objc",
+ "../sdk:mediaconstraints_objc",
+ "../sdk:peerconnectionfactory_base_objc",
+ "../sdk:videocapture_objc",
+ "../sdk:videoframebuffer_objc",
+ "../sdk:videosource_objc",
+ ]
+ frameworks = [
+ "CoreMedia.framework",
+ "QuartzCore.framework",
+ ]
+ }
+
+ if (is_ios) {
+ rtc_library("AppRTCMobile_lib") {
+ # iOS must use WebRTC.framework which is dynamically linked.
+ testonly = true
+ sources = [
+ "objc/AppRTCMobile/ios/ARDAppDelegate.h",
+ "objc/AppRTCMobile/ios/ARDAppDelegate.m",
+ "objc/AppRTCMobile/ios/ARDFileCaptureController.h",
+ "objc/AppRTCMobile/ios/ARDFileCaptureController.m",
+ "objc/AppRTCMobile/ios/ARDMainView.h",
+ "objc/AppRTCMobile/ios/ARDMainView.m",
+ "objc/AppRTCMobile/ios/ARDMainViewController.h",
+ "objc/AppRTCMobile/ios/ARDMainViewController.m",
+ "objc/AppRTCMobile/ios/ARDSettingsViewController.h",
+ "objc/AppRTCMobile/ios/ARDSettingsViewController.m",
+ "objc/AppRTCMobile/ios/ARDStatsView.h",
+ "objc/AppRTCMobile/ios/ARDStatsView.m",
+ "objc/AppRTCMobile/ios/ARDVideoCallView.h",
+ "objc/AppRTCMobile/ios/ARDVideoCallView.m",
+ "objc/AppRTCMobile/ios/ARDVideoCallViewController.h",
+ "objc/AppRTCMobile/ios/ARDVideoCallViewController.m",
+ "objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h",
+ "objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m",
+ "objc/AppRTCMobile/ios/UIImage+ARDUtilities.h",
+ "objc/AppRTCMobile/ios/UIImage+ARDUtilities.m",
+ ]
+
+ configs += [ "..:common_objc" ]
+
+ deps = [
+ ":apprtc_common",
+ ":apprtc_signaling",
+ "../sdk:audio_session_objc",
+ "../sdk:base_objc",
+ "../sdk:helpers_objc",
+ "../sdk:mediaconstraints_objc",
+ "../sdk:metal_objc",
+ "../sdk:peerconnectionfactory_base_objc",
+ "../sdk:peerconnectionfactory_base_objc",
+ "../sdk:videocapture_objc",
+ "../sdk:videocodec_objc",
+ ]
+ if (rtc_ios_use_opengl_rendering) {
+ deps += [ "../sdk:opengl_ui_objc" ]
+ }
+
+ frameworks = [ "AVFoundation.framework" ]
+ }
+
+ ios_app_bundle("AppRTCMobile") {
+ testonly = true
+ sources = [ "objc/AppRTCMobile/ios/main.m" ]
+
+ info_plist = "objc/AppRTCMobile/ios/Info.plist"
+
+ configs += [ "..:common_config" ]
+ public_configs = [ "..:common_inherited_config" ]
+
+ deps = [
+ ":AppRTCMobile_ios_bundle_data",
+ ":AppRTCMobile_lib",
+ "../sdk:framework_objc",
+ "../sdk:ios_framework_bundle",
+ ]
+
+ if (rtc_apprtcmobile_broadcast_extension) {
+ deps += [
+ ":AppRTCMobileBroadcastSetupUI_extension_bundle",
+ ":AppRTCMobileBroadcastUpload_extension_bundle",
+ ]
+ }
+
+ if (target_cpu == "x86") {
+ deps += [ "//testing/iossim" ]
+ }
+ }
+
+ if (rtc_apprtcmobile_broadcast_extension) {
+ bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") {
+ testonly = true
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":AppRTCMobileBroadcastUpload", # prevent code format
+ ]
+ sources = [ "$root_out_dir/AppRTCMobileBroadcastUpload.appex" ]
+ outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
+ }
+
+ bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") {
+ testonly = true
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":AppRTCMobileBroadcastSetupUI", # prevent code format
+ ]
+ sources = [ "$root_out_dir/AppRTCMobileBroadcastSetupUI.appex" ]
+ outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
+ }
+
+ rtc_library("AppRTCMobileBroadcastUpload_lib") {
+ testonly = true
+ sources = [
+ "objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h",
+ "objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m",
+ ]
+
+ deps = [
+ ":apprtc_signaling",
+ "../sdk:framework_objc+link",
+ "../sdk:ios_framework_bundle",
+ ]
+
+ frameworks = [ "ReplayKit.framework" ]
+ }
+
+ ios_appex_bundle("AppRTCMobileBroadcastUpload") {
+ testonly = true
+ configs += [ "..:common_config" ]
+ public_configs = [ "..:common_inherited_config" ]
+
+ info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist"
+
+ deps = [
+ ":AppRTCMobileBroadcastUpload_lib",
+ "../sdk:framework_objc",
+ ]
+ }
+
+ ios_appex_bundle("AppRTCMobileBroadcastSetupUI") {
+ sources = [
+ "objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h",
+ "objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m",
+ ]
+
+ info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist"
+
+ frameworks = [ "ReplayKit.framework" ]
+
+ deps = [ ":AppRTCMobile_ios_bundle_data" ]
+ }
+ }
+
+ bundle_data("AppRTCMobile_ios_bundle_data") {
+ sources = [
+ "objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf",
+
+ # Sample video taken from https://media.xiph.org/video/derf/
+ "objc/AppRTCMobile/ios/resources/foreman.mp4",
+ "objc/AppRTCMobile/ios/resources/iPhone5@2x.png",
+ "objc/AppRTCMobile/ios/resources/iPhone6@2x.png",
+ "objc/AppRTCMobile/ios/resources/iPhone6p@3x.png",
+ "objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png",
+ "objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png",
+ "objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png",
+ "objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png",
+ "objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png",
+ "objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png",
+ "objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png",
+ "objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png",
+ "objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png",
+ "objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png",
+ "objc/AppRTCMobile/ios/resources/mozart.mp3",
+ "objc/Icon-120.png",
+ "objc/Icon-180.png",
+ "objc/Icon.png",
+ ]
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+
+ rtc_library("ObjCNativeAPIDemo_lib") {
+ testonly = true
+ sources = [
+ "objcnativeapi/objc/NADAppDelegate.h",
+ "objcnativeapi/objc/NADAppDelegate.m",
+ "objcnativeapi/objc/NADViewController.h",
+ "objcnativeapi/objc/NADViewController.mm",
+ "objcnativeapi/objc/objc_call_client.h",
+ "objcnativeapi/objc/objc_call_client.mm",
+ ]
+
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/rtc_event_log:rtc_event_log_factory",
+ "../api/task_queue:default_task_queue_factory",
+ "../media:rtc_audio_video",
+ "../modules/audio_processing",
+ "../modules/audio_processing:api",
+ "../pc:libjingle_peerconnection",
+ "../rtc_base/synchronization:mutex",
+ "../sdk:base_objc",
+ "../sdk:default_codec_factory_objc",
+ "../sdk:helpers_objc",
+ "../sdk:metal_objc",
+ "../sdk:native_api",
+ "../sdk:videocapture_objc",
+ "../sdk:videotoolbox_objc",
+ ]
+
+ if (rtc_ios_use_opengl_rendering) {
+ deps += [ "../sdk:opengl_ui_objc" ]
+ }
+ }
+
+ ios_app_bundle("ObjCNativeAPIDemo") {
+ testonly = true
+ sources = [ "objcnativeapi/objc/main.m" ]
+
+ info_plist = "objcnativeapi/Info.plist"
+
+ configs += [ "..:common_config" ]
+ public_configs = [ "..:common_inherited_config" ]
+
+ deps = [ ":ObjCNativeAPIDemo_lib" ]
+
+ if (target_cpu == "x86") {
+ deps += [ "//testing/iossim" ]
+ }
+ }
+ }
+
+ if (is_mac) {
+ rtc_library("AppRTCMobile_lib") {
+ testonly = true
+ sources = [
+ "objc/AppRTCMobile/mac/APPRTCAppDelegate.h",
+ "objc/AppRTCMobile/mac/APPRTCAppDelegate.m",
+ "objc/AppRTCMobile/mac/APPRTCViewController.h",
+ "objc/AppRTCMobile/mac/APPRTCViewController.m",
+ ]
+ configs += [ "..:common_objc" ]
+ deps = [
+ ":apprtc_common",
+ ":apprtc_signaling",
+ "../sdk:base_objc",
+ "../sdk:helpers_objc",
+ "../sdk:mediaconstraints_objc",
+ "../sdk:metal_objc",
+ "../sdk:peerconnectionfactory_base_objc",
+ "../sdk:peerconnectionfactory_base_objc",
+ "../sdk:videocapture_objc",
+ "../sdk:videocodec_objc",
+ ]
+ }
+
+ mac_app_bundle("AppRTCMobile") {
+ testonly = true
+ output_name = "AppRTCMobile"
+
+ sources = [ "objc/AppRTCMobile/mac/main.m" ]
+
+ public_configs = [ "..:common_inherited_config" ]
+
+ info_plist = "objc/AppRTCMobile/mac/Info.plist"
+
+ frameworks = [ "AppKit.framework" ]
+
+ ldflags = [
+ "-rpath",
+ "@executable_path/../Frameworks",
+ ]
+
+ deps = [
+ ":AppRTCMobile_lib",
+ "../sdk:mac_framework_bundle",
+ "../sdk:mac_framework_objc+link",
+ ]
+ }
+ }
+
+ config("socketrocket_include_config") {
+ include_dirs = [ "objc/AppRTCMobile/third_party/SocketRocket" ]
+ }
+
+ config("socketrocket_warning_config") {
+ # GN orders flags on a target before flags from configs. The default config
+ # adds these flags so to cancel them out they need to come from a config and
+ # cannot be on the target directly.
+ cflags = [
+ "-Wno-deprecated-declarations",
+ "-Wno-nonnull",
+ "-Wno-semicolon-before-method-body",
+ "-Wno-unused-variable",
+ ]
+
+ cflags_objc = [
+ # Enabled for cflags_objc in build/config/compiler/BUILD.gn.
+ "-Wno-objc-missing-property-synthesis",
+ ]
+ }
+
+ rtc_library("socketrocket") {
+ testonly = true
+ sources = [
+ "objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h",
+ "objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m",
+ ]
+ configs += [ ":socketrocket_warning_config" ]
+ public_configs = [ ":socketrocket_include_config" ]
+
+ libs = [ "icucore" ]
+ frameworks = [
+ "CFNetwork.framework",
+ "Security.framework",
+ ]
+ }
+
+ if (rtc_include_tests) {
+ # TODO(kthelgason): compile xctests on mac when chromium supports it.
+ if (is_ios) {
+ rtc_library("apprtcmobile_test_sources") {
+ # iOS must use WebRTC.framework which is dynamically linked.
+ testonly = true
+ include_dirs = [
+ "objc/AppRTCMobile",
+ "objc/AppRTCMobile/ios",
+ ]
+ sources = [
+ "objc/AppRTCMobile/tests/ARDAppClient_xctest.mm",
+ "objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm",
+ "objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm",
+ ]
+ deps = [
+ ":AppRTCMobile_lib",
+ ":apprtc_signaling",
+ "../rtc_base:ssl",
+ "../sdk:mediaconstraints_objc",
+ "../sdk:peerconnectionfactory_base_objc",
+ "../sdk:videocapture_objc",
+ "//build/config/ios:xctest",
+ "//third_party/ocmock",
+ ]
+ }
+
+ rtc_test("apprtcmobile_tests") {
+ is_xctest = true
+ info_plist = "objc/AppRTCMobile/ios/Info.plist"
+ sources = [ "objc/AppRTCMobile/tests/main.mm" ]
+ deps = [
+ ":AppRTCMobile_lib",
+ ":apprtcmobile_test_sources",
+ "../sdk:framework_objc",
+ "//test:test_support",
+ ]
+ ldflags = [ "-all_load" ]
+ }
+ }
+ }
+}
+
+if (is_linux || is_chromeos || is_win) {
+ if (is_linux || is_chromeos) {
+ pkg_config("gtk_config") {
+ packages = [
+ # Gtk requires gmodule, but it does not list it as a dependency in some
+ # misconfigured systems.
+ "gmodule-2.0",
+ "gthread-2.0",
+ "gtk+-3.0",
+ ]
+ }
+ }
+
+ rtc_executable("peerconnection_client") {
+ testonly = true
+ sources = [
+ "peerconnection/client/conductor.cc",
+ "peerconnection/client/conductor.h",
+ "peerconnection/client/defaults.cc",
+ "peerconnection/client/defaults.h",
+ "peerconnection/client/peer_connection_client.cc",
+ "peerconnection/client/peer_connection_client.h",
+ ]
+
+ deps = [
+ "../api:async_dns_resolver",
+ "../api:audio_options_api",
+ "../api:create_peerconnection_factory",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtp_sender_interface",
+ "../api:scoped_refptr",
+ "../api/audio:audio_mixer_api",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/units:time_delta",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../p2p:rtc_p2p",
+ "../pc:video_track_source",
+ "../rtc_base:async_dns_resolver",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:net_helpers",
+ "../rtc_base:refcount",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base/third_party/sigslot",
+ "../system_wrappers:field_trial",
+ "../test:field_trial",
+ "../test:platform_video_capturer",
+ "../test:rtp_test_utils",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ if (is_win) {
+ sources += [
+ "peerconnection/client/flag_defs.h",
+ "peerconnection/client/main.cc",
+ "peerconnection/client/main_wnd.cc",
+ "peerconnection/client/main_wnd.h",
+ ]
+ configs += [ "//build/config/win:windowed" ]
+ deps += [
+ "../media:rtc_media_base",
+ "../rtc_base:win32",
+ "../rtc_base:win32_socket_init",
+ ]
+ }
+ if (is_linux || is_chromeos) {
+ sources += [
+ "peerconnection/client/linux/main.cc",
+ "peerconnection/client/linux/main_wnd.cc",
+ "peerconnection/client/linux/main_wnd.h",
+ ]
+ cflags = [ "-Wno-deprecated-declarations" ]
+ libs = [
+ "X11",
+ "Xcomposite",
+ "Xext",
+ "Xrender",
+ ]
+ configs += [ ":gtk_config" ]
+ }
+
+ deps += [
+ "../api:libjingle_peerconnection_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_decoder_factory_template",
+ "../api/video_codecs:video_decoder_factory_template_dav1d_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_decoder_factory_template_open_h264_adapter",
+ "../api/video_codecs:video_encoder_factory_template",
+ "../api/video_codecs:video_encoder_factory_template_libaom_av1_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_encoder_factory_template_open_h264_adapter",
+ "../media:rtc_audio_video",
+ "../modules/audio_device",
+ "../modules/audio_processing",
+ "../modules/audio_processing:api",
+ "../modules/video_capture:video_capture_module",
+ "../pc:libjingle_peerconnection",
+ "../rtc_base:rtc_json",
+ "../test:video_test_common",
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/libyuv",
+ ]
+ }
+
+ rtc_executable("peerconnection_server") {
+ testonly = true
+ sources = [
+ "peerconnection/server/data_socket.cc",
+ "peerconnection/server/data_socket.h",
+ "peerconnection/server/main.cc",
+ "peerconnection/server/peer_channel.cc",
+ "peerconnection/server/peer_channel.h",
+ "peerconnection/server/utils.cc",
+ "peerconnection/server/utils.h",
+ ]
+ deps = [
+ "../rtc_base:checks",
+ "../rtc_base:stringutils",
+ "../system_wrappers:field_trial",
+ "../test:field_trial",
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/flags:usage",
+ ]
+ }
+ rtc_executable("turnserver") {
+ testonly = true
+ sources = [ "turnserver/turnserver_main.cc" ]
+ deps = [
+ ":read_auth_file",
+ "../p2p:p2p_server_utils",
+ "../p2p:rtc_p2p",
+ "../pc:rtc_pc",
+ "../rtc_base:async_udp_socket",
+ "../rtc_base:ip_address",
+ "../rtc_base:socket_address",
+ "../rtc_base:socket_server",
+ "../rtc_base:threading",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ ]
+ }
+ rtc_executable("stunserver") {
+ testonly = true
+ sources = [ "stunserver/stunserver_main.cc" ]
+ deps = [
+ "../p2p:p2p_server_utils",
+ "../p2p:rtc_p2p",
+ "../pc:rtc_pc",
+ "../rtc_base:async_udp_socket",
+ "../rtc_base:socket_address",
+ "../rtc_base:socket_server",
+ "../rtc_base:threading",
+ ]
+ }
+}
+
+if (is_win || is_android) {
+ rtc_shared_library("webrtc_unity_plugin") {
+ testonly = true
+ sources = [
+ "unityplugin/simple_peer_connection.cc",
+ "unityplugin/simple_peer_connection.h",
+ "unityplugin/unity_plugin_apis.cc",
+ "unityplugin/unity_plugin_apis.h",
+ "unityplugin/video_observer.cc",
+ "unityplugin/video_observer.h",
+ ]
+
+ if (is_android) {
+ sources += [
+ "unityplugin/class_reference_holder.cc",
+ "unityplugin/class_reference_holder.h",
+ "unityplugin/jni_onload.cc",
+ ]
+ suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
+ }
+
+ if (is_win) {
+ configs += [ "//build/config/win:windowed" ]
+ }
+ deps = [
+ "../api:create_peerconnection_factory",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../media:rtc_audio_video",
+ "../media:rtc_internal_video_codecs",
+ "../media:rtc_media",
+ "../media:rtc_media_base",
+ "../modules/audio_device",
+ "../modules/audio_processing",
+ "../modules/audio_processing:api",
+ "../modules/video_capture:video_capture_module",
+ "../pc:libjingle_peerconnection",
+ "../pc:video_track_source",
+ "../rtc_base:ssl",
+ "../test:platform_video_capturer",
+ "../test:video_test_common",
+ "//third_party/abseil-cpp/absl/memory",
+ ]
+ if (is_android) {
+ deps += [
+ "../modules/utility",
+ "../sdk/android:libjingle_peerconnection_jni",
+ "../sdk/android:native_api_jni",
+ ]
+ }
+ }
+}
+
+if (is_android) {
+ rtc_android_library("webrtc_unity_java") {
+ sources = [ "unityplugin/java/src/org/webrtc/UnityUtility.java" ]
+ deps = [
+ "../rtc_base:base_java",
+ "../sdk/android:camera_java",
+ "../sdk/android:libjingle_peerconnection_java",
+ "../sdk/android:peerconnection_java",
+ "../sdk/android:video_api_java",
+ "../sdk/android:video_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ # TODO(https://bugs.webrtc.org/15095) - Fix or remove this target.
+ #dist_jar("libwebrtc_unity") {
+ # _target_dir_name = get_label_info(":$target_name", "dir")
+ # output = "${root_out_dir}/lib.java${_target_dir_name}/${target_name}.jar"
+ # direct_deps_only = false
+ # use_interface_jars = false
+ # use_unprocessed_jars = false
+ # requires_android = true
+ # deps = [
+ # ":webrtc_unity_java",
+ # "../rtc_base:base_java",
+ # "../sdk/android:libjingle_peerconnection_java",
+ # "../sdk/android:libjingle_peerconnection_metrics_default_java",
+ # "//third_party/androidx:androidx_annotation_annotation_java",
+ # ]
+ #}
+
+ robolectric_binary("android_examples_junit_tests") {
+ sources = [
+ "androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java",
+ "androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java",
+ "androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java",
+ ]
+
+ deps = [
+ ":AppRTCMobile_javalib",
+ "../sdk/android:peerconnection_java",
+ "//third_party/androidx:androidx_test_core_java",
+ "//third_party/google-truth:google_truth_java",
+ ]
+
+ additional_jar_files = [ [
+ "../sdk/android/tests/resources/robolectric.properties",
+ "robolectric.properties",
+ ] ]
+ }
+}
+
+if (!build_with_chromium) {
+ # Doesn't build within Chrome on Win.
+ rtc_executable("stun_prober") {
+ testonly = true
+ sources = [ "stunprober/main.cc" ]
+ deps = [
+ "../p2p:libstunprober",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:network",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../test:scoped_key_value_config",
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/flags:parse",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/examples/DEPS b/third_party/libwebrtc/examples/DEPS
new file mode 100644
index 0000000000..114cda384b
--- /dev/null
+++ b/third_party/libwebrtc/examples/DEPS
@@ -0,0 +1,13 @@
+include_rules = [
+ "+common_video",
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media",
+ "+modules/audio_device",
+ "+modules/video_capture",
+ "+modules/audio_processing",
+ "+p2p",
+ "+pc",
+ "+sdk/objc",
+ "+system_wrappers/include",
+ "+third_party/libyuv",
+]
diff --git a/third_party/libwebrtc/examples/OWNERS b/third_party/libwebrtc/examples/OWNERS
new file mode 100644
index 0000000000..ff1f425462
--- /dev/null
+++ b/third_party/libwebrtc/examples/OWNERS
@@ -0,0 +1,4 @@
+magjed@webrtc.org
+perkj@webrtc.org
+tkchin@webrtc.org
+kthelgason@webrtc.org
diff --git a/third_party/libwebrtc/examples/aarproject/.gitignore b/third_party/libwebrtc/examples/aarproject/.gitignore
new file mode 100644
index 0000000000..e93eb885a9
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/.gitignore
@@ -0,0 +1,16 @@
+# Default ignores by Android Studio
+*.iml
+.gradle
+# We want to specify our own SDK.
+# /local.properties
+/.idea/workspace.xml
+/.idea/libraries
+.DS_Store
+/build
+/captures
+.externalNativeBuild
+
+# Additional ignores
+/gradlew
+/gradlew.bat
+/gradle
diff --git a/third_party/libwebrtc/examples/aarproject/OWNERS b/third_party/libwebrtc/examples/aarproject/OWNERS
new file mode 100644
index 0000000000..cf092a316a
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/OWNERS
@@ -0,0 +1 @@
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/aarproject/app/.gitignore b/third_party/libwebrtc/examples/aarproject/app/.gitignore
new file mode 100644
index 0000000000..796b96d1c4
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/app/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/third_party/libwebrtc/examples/aarproject/app/build.gradle b/third_party/libwebrtc/examples/aarproject/app/build.gradle
new file mode 100644
index 0000000000..70669c3b63
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/app/build.gradle
@@ -0,0 +1,53 @@
+apply plugin: 'com.android.application'
+
+android {
+ compileSdkVersion 31
+ defaultConfig {
+ applicationId "org.appspot.apprtc"
+ minSdkVersion 21
+ targetSdkVersion 31
+ versionCode 1
+ versionName "1.0"
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+ }
+ }
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+ sourceSets {
+ main {
+ manifest.srcFile "../../androidapp/AndroidManifest.xml"
+ java.srcDirs = [
+ "../../androidapp/src"
+ ]
+ res.srcDirs = [
+ "../../androidapp/res"
+ ]
+ }
+ androidTest {
+ manifest.srcFile "../../androidtests/AndroidManifest.xml"
+ java.srcDirs = [
+ "../../androidtests/src"
+ ]
+ // This test doesn't work in Android Studio.
+ java.exclude('**/CallActivityStubbedInputOutputTest.java')
+ }
+ }
+}
+
+dependencies {
+ if (project.hasProperty('aarDir')) {
+ implementation fileTree(dir: project.aarDir, include: ['google-webrtc-*.aar'])
+ }
+ implementation fileTree(dir: '../../androidapp/third_party/autobanh/lib', include: ['autobanh.jar'])
+ implementation 'androidx.annotation:annotation:1.2.0'
+ testImplementation 'junit:junit:4.12'
+ androidTestImplementation 'com.androidx.test:runner:1.0.1'
+ androidTestImplementation 'com.androidx.test.espresso:espresso-core:3.0.1'
+}
diff --git a/third_party/libwebrtc/examples/aarproject/app/proguard-rules.pro b/third_party/libwebrtc/examples/aarproject/app/proguard-rules.pro
new file mode 100644
index 0000000000..d6cc4c5eba
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/app/proguard-rules.pro
@@ -0,0 +1,25 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in /usr/local/google/home/sakal/Android/Sdk/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/third_party/libwebrtc/examples/aarproject/build.gradle b/third_party/libwebrtc/examples/aarproject/build.gradle
new file mode 100644
index 0000000000..969effd3ed
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/build.gradle
@@ -0,0 +1,27 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+
+buildscript {
+
+ repositories {
+ google()
+ mavenCentral()
+ }
+ dependencies {
+ classpath "com.android.tools.build:gradle:7.0.3"
+
+
+ // NOTE: Do not place your application dependencies here; they belong
+ // in the individual module build.gradle files
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/third_party/libwebrtc/examples/aarproject/gradle.properties b/third_party/libwebrtc/examples/aarproject/gradle.properties
new file mode 100644
index 0000000000..2e87c52f83
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/gradle.properties
@@ -0,0 +1,22 @@
+# Project-wide Gradle settings.
+
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx1536m
+
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+
+# AndroidX package structure to make it clearer which packages are bundled with the
+# Android operating system, and which are packaged with your app's APK
+# https://developer.android.com/topic/libraries/support-library/androidx-rn
+android.useAndroidX=true
diff --git a/third_party/libwebrtc/examples/aarproject/local.properties b/third_party/libwebrtc/examples/aarproject/local.properties
new file mode 100644
index 0000000000..99cdcd2674
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/local.properties
@@ -0,0 +1,2 @@
+# Use Android SDK from third_party/android_sdk/public
+sdk.dir=../../third_party/android_sdk/public
diff --git a/third_party/libwebrtc/examples/aarproject/settings.gradle b/third_party/libwebrtc/examples/aarproject/settings.gradle
new file mode 100644
index 0000000000..e7b4def49c
--- /dev/null
+++ b/third_party/libwebrtc/examples/aarproject/settings.gradle
@@ -0,0 +1 @@
+include ':app'
diff --git a/third_party/libwebrtc/examples/androidapp/AndroidManifest.xml b/third_party/libwebrtc/examples/androidapp/AndroidManifest.xml
new file mode 100644
index 0000000000..05f1bd3da3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/AndroidManifest.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="org.appspot.apprtc"
+ android:versionCode="1"
+ android:versionName="1.0">
+
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+ <uses-feature android:glEsVersion="0x00020000" android:required="true" />
+
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.BLUETOOTH" />
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+ <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+
+ <!-- This is a test application that should always be debuggable. -->
+ <application android:label="@string/app_name"
+ android:icon="@drawable/ic_launcher"
+ android:allowBackup="false"
+ android:debuggable="true"
+ android:supportsRtl="false"
+ tools:ignore="HardcodedDebugMode">
+
+ <activity android:name="ConnectActivity"
+ android:label="@string/app_name"
+ android:windowSoftInputMode="adjustPan"
+ android:theme="@style/AppTheme"
+ android:exported="true">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+
+ <intent-filter>
+ <action android:name="android.intent.action.VIEW"/>
+ <category android:name="android.intent.category.DEFAULT"/>
+ <category android:name="android.intent.category.BROWSABLE"/>
+ <data android:scheme="https" android:host="appr.tc"/>
+ <data android:scheme="http" android:host="appr.tc"/>
+ </intent-filter>
+ </activity>
+
+ <activity android:name="SettingsActivity"
+ android:label="@string/settings_name"
+ android:theme="@style/AppTheme">
+ </activity>
+
+ <activity android:name="CallActivity"
+ android:label="@string/app_name"
+ android:screenOrientation="fullUser"
+ android:configChanges="orientation|smallestScreenSize|screenSize|screenLayout"
+ android:theme="@style/CallActivityTheme">
+ </activity>
+ </application>
+</manifest>
diff --git a/third_party/libwebrtc/examples/androidapp/OWNERS b/third_party/libwebrtc/examples/androidapp/OWNERS
new file mode 100644
index 0000000000..109bea2725
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/OWNERS
@@ -0,0 +1,2 @@
+magjed@webrtc.org
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/androidapp/README b/third_party/libwebrtc/examples/androidapp/README
new file mode 100644
index 0000000000..97e609117c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/README
@@ -0,0 +1,23 @@
+This directory contains an example Android client for https://appr.tc
+
+Prerequisites:
+- "Getting the code", "Compiling", and "Using the Bundled Android SDK/NDK"
+ on http://www.webrtc.org/native-code/android
+
+Example of building & using the app:
+
+cd <path/to/webrtc>/src
+ninja -C out/Default AppRTCMobile
+adb install -r out/Default/apks/AppRTCMobile.apk
+
+In desktop chrome, navigate to https://appr.tc and note the r=<NNN> room
+this redirects to or navigate directly to https://appr.tc/r/<NNN> with
+your own room number. Launch AppRTC on the device and add the same <NNN> into the room name list.
+
+You can also run the application from the command line to connect to the first room in the list:
+adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW
+This should result in the app launching on Android and connecting to the 3-dot-apprtc
+page displayed in the desktop browser.
+To run a loopback test, execute the following command:
+adb shell am start -n org.appspot.apprtc/.ConnectActivity -a android.intent.action.VIEW --ez "org.appspot.apprtc.LOOPBACK" true
+
diff --git a/third_party/libwebrtc/examples/androidapp/ant.properties b/third_party/libwebrtc/examples/androidapp/ant.properties
new file mode 100644
index 0000000000..b0971e891e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/ant.properties
@@ -0,0 +1,17 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+# 'source.dir' for the location of your java source folder and
+# 'out.dir' for the location of your output folder.
+
+# You can also use it to define how the release builds are signed by declaring
+# the following properties:
+# 'key.store' for the location of your keystore and
+# 'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
diff --git a/third_party/libwebrtc/examples/androidapp/build.xml b/third_party/libwebrtc/examples/androidapp/build.xml
new file mode 100644
index 0000000000..aa1db6db79
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="AppRTCMobile" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+  <!-- if sdk.dir was not set from one of the property files, then
+       get it from the ANDROID_SDK_ROOT env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/third_party/libwebrtc/examples/androidapp/project.properties b/third_party/libwebrtc/examples/androidapp/project.properties
new file mode 100644
index 0000000000..a6ca533fe3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked into Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..22f30d31ca
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..d9436e5248
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png
new file mode 100644
index 0000000000..f01a31a717
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-hdpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..e4a9ff0a8e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..f5c80f00e7
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png
new file mode 100644
index 0000000000..5492ed770a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-ldpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..e4a9ff0a8e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..f5c80f00e7
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png
new file mode 100644
index 0000000000..b8b4b0ec4b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-mdpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png
new file mode 100644
index 0000000000..be36174c24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/disconnect.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png
new file mode 100644
index 0000000000..6d90c071d5
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000000..a773b34208
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png
new file mode 100644
index 0000000000..a3cd45890c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_launcher.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png
new file mode 100644
index 0000000000..39311853b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/drawable-xhdpi/ic_loopback_call.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml b/third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml
new file mode 100644
index 0000000000..bf811426f3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/activity_call.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!-- tools:ignore is needed because lint thinks this can be replaced with a merge. Replacing this
+ with a merge causes the fullscreen SurfaceView not to be centered. -->
+<FrameLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ tools:ignore="MergeRootFrame">
+
+ <org.webrtc.SurfaceViewRenderer
+ android:id="@+id/fullscreen_video_view"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center" />
+
+ <org.webrtc.SurfaceViewRenderer
+ android:id="@+id/pip_video_view"
+ android:layout_height="144dp"
+ android:layout_width="wrap_content"
+ android:layout_gravity="bottom|end"
+ android:layout_margin="16dp"/>
+
+ <FrameLayout
+ android:id="@+id/call_fragment_container"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent" />
+ <FrameLayout
+ android:id="@+id/hud_fragment_container"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent" />
+
+</FrameLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml b/third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml
new file mode 100644
index 0000000000..017e5cabff
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/activity_connect.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_margin="16dp"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:orientation="vertical"
+ android:weightSum="1"
+ android:layout_centerHorizontal="true">
+
+ <TextView
+ android:id="@+id/room_edittext_description"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:text="@string/room_description"/>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:gravity="center"
+ android:layout_marginBottom="8dp">
+
+ <!-- TODO(crbug.com/900912): Fix and remove lint ignore -->
+ <EditText
+ tools:ignore="LabelFor,Autofill"
+ android:id="@+id/room_edittext"
+ android:layout_width="0dp"
+ android:layout_height="wrap_content"
+ android:layout_weight="1"
+ android:maxLines="1"
+ android:imeOptions="actionDone"
+ android:inputType="text"/>
+
+ <ImageButton
+ android:id="@+id/connect_button"
+ android:layout_width="48dp"
+ android:layout_height="48dp"
+ android:contentDescription="@string/connect_description"
+ android:background="@android:drawable/sym_action_call" />
+
+ <ImageButton
+ android:id="@+id/add_favorite_button"
+ android:layout_width="48dp"
+ android:layout_height="48dp"
+ android:contentDescription="@string/add_favorite_description"
+ android:background="@android:drawable/ic_input_add" />
+ </LinearLayout>
+
+ <TextView
+ android:id="@+id/room_listview_description"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_marginTop="8dp"
+ android:lines="1"
+ android:maxLines="1"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:text="@string/favorites"
+ android:gravity="center_vertical"/>
+
+ <FrameLayout
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1">
+
+ <ListView
+ android:id="@+id/room_listview"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:drawSelectorOnTop="false" />
+
+ <TextView
+ android:id="@android:id/empty"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:gravity="center"
+ android:text="@string/no_favorites" />
+ </FrameLayout>
+</LinearLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml
new file mode 100644
index 0000000000..90b1e9ca0e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_call.xml
@@ -0,0 +1,77 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<RelativeLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <TextView
+ android:id="@+id/contact_name_call"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_centerHorizontal="true"
+ android:layout_above="@+id/buttons_call_container"
+ android:textSize="24sp"
+ android:layout_margin="8dp"/>
+
+ <LinearLayout
+ android:id="@+id/buttons_call_container"
+ android:orientation="horizontal"
+ android:layout_above="@+id/capture_format_text_call"
+ android:layout_alignWithParentIfMissing="true"
+ android:layout_marginBottom="32dp"
+ android:layout_centerHorizontal="true"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+
+ <ImageButton
+ android:id="@+id/button_call_disconnect"
+ android:background="@drawable/disconnect"
+ android:contentDescription="@string/disconnect_call"
+ android:layout_marginEnd="16dp"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <ImageButton
+ android:id="@+id/button_call_switch_camera"
+ android:background="@android:drawable/ic_menu_camera"
+ android:contentDescription="@string/switch_camera"
+ android:layout_marginEnd="8dp"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <ImageButton
+ android:id="@+id/button_call_scaling_mode"
+ android:background="@drawable/ic_action_return_from_full_screen"
+ android:contentDescription="@string/disconnect_call"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <ImageButton
+ android:id="@+id/button_call_toggle_mic"
+ android:background="@android:drawable/ic_btn_speak_now"
+ android:contentDescription="@string/toggle_mic"
+ android:layout_marginEnd="8dp"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+ </LinearLayout>
+
+ <TextView
+ android:id="@+id/capture_format_text_call"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_centerHorizontal="true"
+ android:layout_above="@+id/capture_format_slider_call"
+ android:textSize="16sp"
+ android:text="@string/capture_format_change_text"/>
+
+ <SeekBar
+ android:id="@+id/capture_format_slider_call"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_centerHorizontal="true"
+ android:layout_alignParentBottom="true"
+ android:progress="50"
+ android:layout_margin="8dp"/>
+
+</RelativeLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml
new file mode 100644
index 0000000000..483e7ba456
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/layout/fragment_hud.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<RelativeLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <ImageButton
+ android:id="@+id/button_toggle_debug"
+ android:background="@android:drawable/ic_menu_info_details"
+ android:contentDescription="@string/toggle_debug"
+ android:layout_alignParentBottom="true"
+ android:layout_alignParentStart="true"
+ android:layout_width="48dp"
+ android:layout_height="48dp"/>
+
+ <TextView
+ android:id="@+id/hud_stat_call"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_alignParentEnd="true"
+ android:textStyle="bold"
+ android:textColor="#C000FF00"
+ android:textSize="12sp"
+ android:layout_margin="8dp"/>
+
+</RelativeLayout>
diff --git a/third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml b/third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml
new file mode 100644
index 0000000000..a723f54941
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/menu/connect_menu.xml
@@ -0,0 +1,13 @@
+<menu xmlns:android="http://schemas.android.com/apk/res/android">
+ <item
+ android:id="@+id/action_loopback"
+ android:icon="@drawable/ic_loopback_call"
+ android:showAsAction="always"
+ android:title="@string/action_loopback"/>
+ <item
+ android:id="@+id/action_settings"
+ android:orderInCategory="100"
+ android:icon="@android:drawable/ic_menu_preferences"
+ android:showAsAction="ifRoom"
+ android:title="@string/action_settings"/>
+</menu>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml b/third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml
new file mode 100644
index 0000000000..969b5012e9
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values-v17/styles.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="AppTheme" parent="android:Theme.Holo" />
+
+ <style name="CallActivityTheme" parent="android:Theme.Black">
+ <item name="android:windowActionBar">false</item>
+ <item name="android:windowFullscreen">true</item>
+ <item name="android:windowNoTitle">true</item>
+ </style>
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml b/third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml
new file mode 100644
index 0000000000..b19af7e38f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values-v21/styles.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="AppTheme" parent="android:Theme.Material" />
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values/arrays.xml b/third_party/libwebrtc/examples/androidapp/res/values/arrays.xml
new file mode 100644
index 0000000000..4a2948c875
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values/arrays.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string-array name="videoResolutions">
+ <item>Default</item>
+ <item>4K (3840 x 2160)</item>
+ <item>Full HD (1920 x 1080)</item>
+ <item>HD (1280 x 720)</item>
+ <item>VGA (640 x 480)</item>
+ <item>QVGA (320 x 240)</item>
+ </string-array>
+
+ <string-array name="videoResolutionsValues">
+ <item>Default</item>
+ <item>3840 x 2160</item>
+ <item>1920 x 1080</item>
+ <item>1280 x 720</item>
+ <item>640 x 480</item>
+ <item>320 x 240</item>
+ </string-array>
+
+ <string-array name="cameraFps">
+ <item>Default</item>
+ <item>30 fps</item>
+ <item>15 fps</item>
+ </string-array>
+
+ <string-array name="startBitrate">
+ <item>Default</item>
+ <item>Manual</item>
+ </string-array>
+
+ <string-array name="videoCodecs">
+ <item>VP8</item>
+ <item>VP9</item>
+ <item>H264 Baseline</item>
+ <item>H264 High</item>
+ <item>AV1</item>
+ </string-array>
+
+ <string-array name="audioCodecs">
+ <item>OPUS</item>
+ <item>ISAC</item>
+ </string-array>
+
+ <string-array name="speakerphone">
+ <item>Auto (proximity sensor)</item>
+ <item>Enabled</item>
+ <item>Disabled</item>
+ </string-array>
+
+ <string-array name="speakerphoneValues">
+ <item>auto</item>
+ <item>true</item>
+ <item>false</item>
+ </string-array>
+
+ <string-array name="roomListContextMenu">
+ <item>Remove favorite</item>
+ </string-array>
+
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/values/strings.xml b/third_party/libwebrtc/examples/androidapp/res/values/strings.xml
new file mode 100644
index 0000000000..814966f200
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/values/strings.xml
@@ -0,0 +1,224 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="app_name" translatable="false">AppRTC</string>
+ <string name="settings_name" translatable="false">AppRTC Settings</string>
+ <string name="disconnect_call">Disconnect Call</string>
+ <string name="room_description">
+ Please enter a room name. Room names are shared with everyone, so think
+ of something unique and send it to a friend.
+ </string>
+ <string name="favorites">Favorites</string>
+ <string name="no_favorites">No favorites</string>
+ <string name="invalid_url_title">Invalid URL</string>
+ <string name="invalid_url_text">The URL or room name you entered resulted in an invalid URL: %1$s
+ </string>
+ <string name="channel_error_title">Connection error</string>
+ <string name="connecting_to">Connecting to: %1$s</string>
+ <string name="missing_url">FATAL ERROR: Missing URL to connect to.</string>
+ <string name="camera2_texture_only_error">Camera2 only supports capturing to texture. Either disable Camera2 or enable capturing to texture in the options.</string>
+ <string name="ok">OK</string>
+ <string name="switch_camera">Switch front/back camera</string>
+ <string name="capture_format_change_text">Slide to change capture format</string>
+ <string name="muted">Muted</string>
+ <string name="toggle_debug">Toggle debug view</string>
+ <string name="toggle_mic">Toggle microphone on/off</string>
+ <string name="action_settings">Settings</string>
+ <string name="action_loopback">Loopback connection</string>
+ <string name="connect_description">Connect to the room</string>
+ <string name="add_favorite_description">Add favorite</string>
+ <string name="format_description">%1$dx%2$d @ %3$d fps</string>
+ <string name="missing_permissions_try_again">The application is missing permissions. It might not work correctly. Do you want to try again?</string>
+ <string name="yes">Yes</string>
+ <string name="no">No</string>
+
+ <!-- Settings strings. -->
+ <string name="pref_room_key">room_preference</string>
+ <string name="pref_room_list_key">room_list_preference</string>
+
+ <string name="pref_videosettings_key">video_settings_key</string>
+ <string name="pref_videosettings_title">WebRTC video settings.</string>
+
+ <string name="pref_videocall_key">videocall_preference</string>
+ <string name="pref_videocall_title">Video call.</string>
+ <string name="pref_videocall_dlg">Enable video in a call.</string>
+ <string name="pref_videocall_default">true</string>
+
+ <string name="pref_screencapture_key">screencapture_preference</string>
+ <string name="pref_screencapture_title">Use screencapture.</string>
+ <string name="pref_screencapture_default">false</string>
+
+ <string name="pref_camera2_key">camera2_preference</string>
+ <string name="pref_camera2_title">Use Camera2.</string>
+ <string name="pref_camera2_default">true</string>
+ <string name="pref_camera2_not_supported">Not supported on this device.</string>
+
+ <string name="pref_resolution_key">resolution_preference</string>
+ <string name="pref_resolution_title">Video resolution.</string>
+ <string name="pref_resolution_dlg">Enter AppRTC local video resolution.</string>
+ <string name="pref_resolution_default">Default</string>
+
+ <string name="pref_fps_key">fps_preference</string>
+ <string name="pref_fps_title">Camera fps.</string>
+ <string name="pref_fps_dlg">Enter local camera fps.</string>
+ <string name="pref_fps_default">Default</string>
+
+ <string name="pref_capturequalityslider_key">capturequalityslider_preference</string>
+ <string name="pref_capturequalityslider_title">Capture quality slider.</string>
+ <string name="pref_capturequalityslider_dlg">Enable slider for changing capture quality.</string>
+ <string name="pref_capturequalityslider_default">false</string>
+
+ <string name="pref_maxvideobitrate_key">maxvideobitrate_preference</string>
+ <string name="pref_maxvideobitrate_title">Maximum video bitrate setting.</string>
+ <string name="pref_maxvideobitrate_dlg">Maximum video bitrate setting.</string>
+ <string name="pref_maxvideobitrate_default">Default</string>
+
+ <string name="pref_maxvideobitratevalue_key">maxvideobitratevalue_preference</string>
+ <string name="pref_maxvideobitratevalue_title">Video encoder maximum bitrate.</string>
+ <string name="pref_maxvideobitratevalue_dlg">Enter video encoder maximum bitrate in kbps.</string>
+ <string name="pref_maxvideobitratevalue_default">1700</string>
+
+ <string name="pref_videocodec_key">videocodec_preference</string>
+ <string name="pref_videocodec_title">Default video codec.</string>
+ <string name="pref_videocodec_dlg">Select default video codec.</string>
+ <string name="pref_videocodec_default">VP8</string>
+
+ <string name="pref_hwcodec_key">hwcodec_preference</string>
+ <string name="pref_hwcodec_title">Video codec hardware acceleration.</string>
+ <string name="pref_hwcodec_dlg">Use hardware accelerated video codec (if available).</string>
+ <string name="pref_hwcodec_default">true</string>
+
+ <string name="pref_capturetotexture_key">capturetotexture_preference</string>
+ <string name="pref_capturetotexture_title">Video capture to surface texture.</string>
+ <string name="pref_capturetotexture_dlg">Capture video to textures (if available).</string>
+ <string name="pref_capturetotexture_default">true</string>
+
+ <string name="pref_flexfec_key">flexfec_preference</string>
+ <string name="pref_flexfec_title">Codec-agnostic Flexible FEC.</string>
+ <string name="pref_flexfec_dlg">Enable FlexFEC.</string>
+ <string name="pref_flexfec_default">false</string>
+
+ <string name="pref_value_enabled">Enabled</string>
+ <string name="pref_value_disabled">Disabled</string>
+
+ <string name="pref_audiosettings_key">audio_settings_key</string>
+ <string name="pref_audiosettings_title">WebRTC audio settings.</string>
+
+ <string name="pref_startaudiobitrate_key">startaudiobitrate_preference</string>
+ <string name="pref_startaudiobitrate_title">Audio bitrate setting.</string>
+ <string name="pref_startaudiobitrate_dlg">Audio bitrate setting.</string>
+ <string name="pref_startaudiobitrate_default">Default</string>
+
+ <string name="pref_startaudiobitratevalue_key">startaudiobitratevalue_preference</string>
+ <string name="pref_startaudiobitratevalue_title">Audio codec bitrate.</string>
+ <string name="pref_startaudiobitratevalue_dlg">Enter audio codec bitrate in kbps.</string>
+ <string name="pref_startaudiobitratevalue_default">32</string>
+
+ <string name="pref_audiocodec_key">audiocodec_preference</string>
+ <string name="pref_audiocodec_title">Default audio codec.</string>
+ <string name="pref_audiocodec_dlg">Select default audio codec.</string>
+ <string name="pref_audiocodec_default">OPUS</string>
+
+ <string name="pref_noaudioprocessing_key">audioprocessing_preference</string>
+ <string name="pref_noaudioprocessing_title">Disable audio processing.</string>
+ <string name="pref_noaudioprocessing_dlg">Disable audio processing pipeline.</string>
+ <string name="pref_noaudioprocessing_default">false</string>
+
+ <string name="pref_aecdump_key">aecdump_preference</string>
+ <string name="pref_aecdump_title">Create aecdump.</string>
+ <string name="pref_aecdump_dlg">Enable diagnostic audio recordings.</string>
+ <string name="pref_aecdump_default">false</string>
+
+ <string name="pref_enable_save_input_audio_to_file_key">enable_key</string>
+ <string name="pref_enable_save_input_audio_to_file_title">Save input audio to file.</string>
+ <string name="pref_enable_save_input_audio_to_file_dlg">Save input audio to file.</string>
+ <string name="pref_enable_save_input_audio_to_file_default">false</string>
+
+ <string name="pref_opensles_key">opensles_preference</string>
+ <string name="pref_opensles_title">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_dlg">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_default">false</string>
+
+ <string name="pref_disable_built_in_aec_key">disable_built_in_aec_preference</string>
+ <string name="pref_disable_built_in_aec_title">Disable hardware AEC.</string>
+ <string name="pref_disable_built_in_aec_dlg">Disable hardware AEC.</string>
+ <string name="pref_disable_built_in_aec_default">false</string>
+ <string name="pref_built_in_aec_not_available">Hardware AEC is not available</string>
+
+ <string name="pref_disable_built_in_agc_key">disable_built_in_agc_preference</string>
+ <string name="pref_disable_built_in_agc_title">Disable hardware AGC.</string>
+ <string name="pref_disable_built_in_agc_dlg">Disable hardware AGC.</string>
+ <string name="pref_disable_built_in_agc_default">false</string>
+ <string name="pref_built_in_agc_not_available">Hardware AGC is not available</string>
+
+ <string name="pref_disable_built_in_ns_key">disable_built_in_ns_preference</string>
+ <string name="pref_disable_built_in_ns_title">Disable hardware NS.</string>
+ <string name="pref_disable_built_in_ns_dlg">Disable hardware NS.</string>
+ <string name="pref_disable_built_in_ns_default">false</string>
+ <string name="pref_built_in_ns_not_available">Hardware NS is not available</string>
+
+ <string name="pref_disable_webrtc_agc_and_hpf_key">disable_webrtc_agc_and_hpf_preference</string>
+ <string name="pref_disable_webrtc_agc_and_hpf_title">Disable WebRTC AGC and HPF.</string>
+ <string name="pref_disable_webrtc_agc_default">false</string>
+
+ <string name="pref_speakerphone_key">speakerphone_preference</string>
+ <string name="pref_speakerphone_title">Speakerphone.</string>
+ <string name="pref_speakerphone_dlg">Speakerphone.</string>
+ <string name="pref_speakerphone_default">auto</string>
+
+ <string name="pref_datasettings_key">data_settings_key</string>
+ <string name="pref_datasettings_title">WebRTC data channel settings.</string>
+
+ <string name="pref_enable_datachannel_key">enable_datachannel_preference</string>
+ <string name="pref_enable_datachannel_title">Enable datachannel.</string>
+ <string name="pref_enable_datachannel_default" translatable="false">true</string>
+
+ <string name="pref_ordered_key">ordered_preference</string>
+ <string name="pref_ordered_title">Order messages.</string>
+ <string name="pref_ordered_default" translatable="false">true</string>
+
+ <string name="pref_data_protocol_key">Subprotocol</string>
+ <string name="pref_data_protocol_title">Subprotocol.</string>
+ <string name="pref_data_protocol_dlg">Enter subprotocol.</string>
+ <string name="pref_data_protocol_default" translatable="false"></string>
+
+ <string name="pref_negotiated_key">negotiated_preference</string>
+ <string name="pref_negotiated_title">Negotiated.</string>
+ <string name="pref_negotiated_default" translatable="false">false</string>
+
+ <string name="pref_max_retransmit_time_ms_key">max_retransmit_time_ms_preference</string>
+ <string name="pref_max_retransmit_time_ms_title">Max delay to retransmit.</string>
+ <string name="pref_max_retransmit_time_ms_dlg">Enter max delay to retransmit (in ms).</string>
+ <string name="pref_max_retransmit_time_ms_default" translatable="false">-1</string>
+
+ <string name="pref_max_retransmits_key">max_retransmits_preference</string>
+ <string name="pref_max_retransmits_title">Max attempts to retransmit.</string>
+ <string name="pref_max_retransmits_dlg">Enter max attempts to retransmit.</string>
+ <string name="pref_max_retransmits_default" translatable="false">-1</string>
+
+ <string name="pref_data_id_key">data_id_preference</string>
+ <string name="pref_data_id_title">Data id.</string>
+ <string name="pref_data_id_dlg">Enter data channel id.</string>
+ <string name="pref_data_id_default" translatable="false">-1</string>
+
+ <string name="pref_miscsettings_key">misc_settings_key</string>
+ <string name="pref_miscsettings_title">Miscellaneous settings.</string>
+
+ <string name="pref_room_server_url_key">room_server_url_preference</string>
+ <string name="pref_room_server_url_title">Room server URL.</string>
+ <string name="pref_room_server_url_dlg">Enter a room server URL.</string>
+ <string name="pref_room_server_url_default" translatable="false">https://appr.tc</string>
+
+ <string name="pref_displayhud_key">displayhud_preference</string>
+ <string name="pref_displayhud_title">Display call statistics.</string>
+ <string name="pref_displayhud_dlg">Display call statistics.</string>
+ <string name="pref_displayhud_default" translatable="false">false</string>
+
+ <string name="pref_tracing_key">tracing_preference</string>
+ <string name="pref_tracing_title">Debug performance tracing.</string>
+ <string name="pref_tracing_dlg">Debug performance tracing.</string>
+ <string name="pref_tracing_default" translatable="false">false</string>
+
+ <string name="pref_enable_rtceventlog_key">enable_rtceventlog_key</string>
+ <string name="pref_enable_rtceventlog_title">Enable RtcEventLog.</string>
+ <string name="pref_enable_rtceventlog_default">false</string>
+</resources>
diff --git a/third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml b/third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml
new file mode 100644
index 0000000000..14e74d5c0b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/res/xml/preferences.xml
@@ -0,0 +1,247 @@
+<?xml version="1.0" encoding="utf-8"?>
+<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
+ <PreferenceCategory
+ android:key="@string/pref_videosettings_key"
+ android:title="@string/pref_videosettings_title">
+
+ <CheckBoxPreference
+ android:key="@string/pref_videocall_key"
+ android:title="@string/pref_videocall_title"
+ android:dialogTitle="@string/pref_videocall_dlg"
+ android:defaultValue="@string/pref_videocall_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_screencapture_key"
+ android:title="@string/pref_screencapture_title"
+ android:defaultValue="@string/pref_screencapture_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_camera2_key"
+ android:title="@string/pref_camera2_title"
+ android:defaultValue="@string/pref_camera2_default" />
+
+ <ListPreference
+ android:key="@string/pref_resolution_key"
+ android:title="@string/pref_resolution_title"
+ android:defaultValue="@string/pref_resolution_default"
+ android:dialogTitle="@string/pref_resolution_dlg"
+ android:entries="@array/videoResolutions"
+ android:entryValues="@array/videoResolutionsValues" />
+
+ <ListPreference
+ android:key="@string/pref_fps_key"
+ android:title="@string/pref_fps_title"
+ android:defaultValue="@string/pref_fps_default"
+ android:dialogTitle="@string/pref_fps_dlg"
+ android:entries="@array/cameraFps"
+ android:entryValues="@array/cameraFps" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_capturequalityslider_key"
+ android:title="@string/pref_capturequalityslider_title"
+ android:dialogTitle="@string/pref_capturequalityslider_dlg"
+ android:defaultValue="@string/pref_capturequalityslider_default" />
+
+ <ListPreference
+ android:key="@string/pref_maxvideobitrate_key"
+ android:title="@string/pref_maxvideobitrate_title"
+ android:defaultValue="@string/pref_maxvideobitrate_default"
+ android:dialogTitle="@string/pref_maxvideobitrate_dlg"
+ android:entries="@array/startBitrate"
+ android:entryValues="@array/startBitrate" />
+
+ <EditTextPreference
+ android:key="@string/pref_maxvideobitratevalue_key"
+ android:title="@string/pref_maxvideobitratevalue_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_maxvideobitratevalue_default"
+ android:dialogTitle="@string/pref_maxvideobitratevalue_dlg" />
+
+ <ListPreference
+ android:key="@string/pref_videocodec_key"
+ android:title="@string/pref_videocodec_title"
+ android:defaultValue="@string/pref_videocodec_default"
+ android:dialogTitle="@string/pref_videocodec_dlg"
+ android:entries="@array/videoCodecs"
+ android:entryValues="@array/videoCodecs" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_hwcodec_key"
+ android:title="@string/pref_hwcodec_title"
+ android:dialogTitle="@string/pref_hwcodec_dlg"
+ android:defaultValue="@string/pref_hwcodec_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_capturetotexture_key"
+ android:title="@string/pref_capturetotexture_title"
+ android:dialogTitle="@string/pref_capturetotexture_dlg"
+ android:defaultValue="@string/pref_capturetotexture_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_flexfec_key"
+ android:title="@string/pref_flexfec_title"
+ android:dialogTitle="@string/pref_flexfec_dlg"
+ android:defaultValue="@string/pref_flexfec_default" />
+ </PreferenceCategory>
+
+ <PreferenceCategory
+ android:key="@string/pref_audiosettings_key"
+ android:title="@string/pref_audiosettings_title">
+
+ <ListPreference
+ android:key="@string/pref_startaudiobitrate_key"
+ android:title="@string/pref_startaudiobitrate_title"
+ android:defaultValue="@string/pref_startaudiobitrate_default"
+ android:dialogTitle="@string/pref_startaudiobitrate_dlg"
+ android:entries="@array/startBitrate"
+ android:entryValues="@array/startBitrate" />
+
+ <EditTextPreference
+ android:key="@string/pref_startaudiobitratevalue_key"
+ android:title="@string/pref_startaudiobitratevalue_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_startaudiobitratevalue_default"
+ android:dialogTitle="@string/pref_startaudiobitratevalue_dlg" />
+
+ <ListPreference
+ android:key="@string/pref_audiocodec_key"
+ android:title="@string/pref_audiocodec_title"
+ android:defaultValue="@string/pref_audiocodec_default"
+ android:dialogTitle="@string/pref_audiocodec_dlg"
+ android:entries="@array/audioCodecs"
+ android:entryValues="@array/audioCodecs" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_noaudioprocessing_key"
+ android:title="@string/pref_noaudioprocessing_title"
+ android:dialogTitle="@string/pref_noaudioprocessing_dlg"
+ android:defaultValue="@string/pref_noaudioprocessing_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_aecdump_key"
+ android:title="@string/pref_aecdump_title"
+ android:dialogTitle="@string/pref_aecdump_dlg"
+ android:defaultValue="@string/pref_aecdump_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_enable_save_input_audio_to_file_key"
+ android:title="@string/pref_enable_save_input_audio_to_file_title"
+ android:dialogTitle="@string/pref_enable_save_input_audio_to_file_dlg"
+ android:defaultValue="@string/pref_enable_save_input_audio_to_file_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_opensles_key"
+ android:title="@string/pref_opensles_title"
+ android:dialogTitle="@string/pref_opensles_dlg"
+ android:defaultValue="@string/pref_opensles_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_built_in_aec_key"
+ android:title="@string/pref_disable_built_in_aec_title"
+ android:dialogTitle="@string/pref_disable_built_in_aec_dlg"
+ android:defaultValue="@string/pref_disable_built_in_aec_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_built_in_agc_key"
+ android:title="@string/pref_disable_built_in_agc_title"
+ android:dialogTitle="@string/pref_disable_built_in_agc_dlg"
+ android:defaultValue="@string/pref_disable_built_in_agc_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_built_in_ns_key"
+ android:title="@string/pref_disable_built_in_ns_title"
+ android:dialogTitle="@string/pref_disable_built_in_ns_dlg"
+ android:defaultValue="@string/pref_disable_built_in_ns_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_disable_webrtc_agc_and_hpf_key"
+ android:title="@string/pref_disable_webrtc_agc_and_hpf_title"
+ android:defaultValue="@string/pref_disable_webrtc_agc_default" />
+
+ <ListPreference
+ android:key="@string/pref_speakerphone_key"
+ android:title="@string/pref_speakerphone_title"
+ android:defaultValue="@string/pref_speakerphone_default"
+ android:dialogTitle="@string/pref_speakerphone_dlg"
+ android:entries="@array/speakerphone"
+ android:entryValues="@array/speakerphoneValues" />
+ </PreferenceCategory>
+
+ <PreferenceCategory
+ android:key="@string/pref_datasettings_key"
+ android:title="@string/pref_datasettings_title">
+
+ <CheckBoxPreference
+ android:key="@string/pref_enable_datachannel_key"
+ android:title="@string/pref_enable_datachannel_title"
+ android:defaultValue="@string/pref_enable_datachannel_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_ordered_key"
+ android:title="@string/pref_ordered_title"
+ android:defaultValue="@string/pref_ordered_default" />
+
+ <EditTextPreference
+ android:key="@string/pref_data_protocol_key"
+ android:title="@string/pref_data_protocol_title"
+ android:inputType="text"
+ android:defaultValue="@string/pref_data_protocol_default"
+ android:dialogTitle="@string/pref_data_protocol_dlg" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_negotiated_key"
+ android:title="@string/pref_negotiated_title"
+ android:defaultValue="@string/pref_negotiated_default" />
+
+ <EditTextPreference
+ android:key="@string/pref_max_retransmit_time_ms_key"
+ android:title="@string/pref_max_retransmit_time_ms_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_max_retransmit_time_ms_default"
+ android:dialogTitle="@string/pref_max_retransmit_time_ms_dlg" />
+
+ <EditTextPreference
+ android:key="@string/pref_max_retransmits_key"
+ android:title="@string/pref_max_retransmits_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_max_retransmits_default"
+ android:dialogTitle="@string/pref_max_retransmits_dlg" />
+
+ <EditTextPreference
+ android:key="@string/pref_data_id_key"
+ android:title="@string/pref_data_id_title"
+ android:inputType="number"
+ android:defaultValue="@string/pref_data_id_default"
+ android:dialogTitle="@string/pref_data_id_dlg" />
+ </PreferenceCategory>
+
+ <PreferenceCategory
+ android:key="@string/pref_miscsettings_key"
+ android:title="@string/pref_miscsettings_title">
+
+ <EditTextPreference
+ android:key="@string/pref_room_server_url_key"
+ android:title="@string/pref_room_server_url_title"
+ android:inputType="text"
+ android:defaultValue="@string/pref_room_server_url_default"
+ android:dialogTitle="@string/pref_room_server_url_dlg" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_displayhud_key"
+ android:title="@string/pref_displayhud_title"
+ android:dialogTitle="@string/pref_displayhud_dlg"
+ android:defaultValue="@string/pref_displayhud_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_tracing_key"
+ android:title="@string/pref_tracing_title"
+ android:dialogTitle="@string/pref_tracing_dlg"
+ android:defaultValue="@string/pref_tracing_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_enable_rtceventlog_key"
+ android:title="@string/pref_enable_rtceventlog_title"
+ android:defaultValue="@string/pref_enable_rtceventlog_default"/>
+ </PreferenceCategory>
+
+</PreferenceScreen>
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
new file mode 100644
index 0000000000..2536b131a1
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
@@ -0,0 +1,594 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.SharedPreferences;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.os.Build;
+import android.preference.PreferenceManager;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import org.appspot.apprtc.util.AppRTCUtils;
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCAudioManager manages all audio related parts of the AppRTC demo.
+ */
+public class AppRTCAudioManager {
+ private static final String TAG = "AppRTCAudioManager";
+ private static final String SPEAKERPHONE_AUTO = "auto";
+ private static final String SPEAKERPHONE_TRUE = "true";
+ private static final String SPEAKERPHONE_FALSE = "false";
+
+ /**
+ * AudioDevice is the names of possible audio devices that we currently
+ * support.
+ */
+ public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE }
+
+ /** AudioManager state. */
+ public enum AudioManagerState {
+ UNINITIALIZED,
+ PREINITIALIZED,
+ RUNNING,
+ }
+
+ /** Selected audio device change event. */
+ public interface AudioManagerEvents {
+ // Callback fired once audio device is changed or list of available audio devices changed.
+ void onAudioDeviceChanged(
+ AudioDevice selectedAudioDevice, Set<AudioDevice> availableAudioDevices);
+ }
+
+ private final Context apprtcContext;
+ @Nullable
+ private AudioManager audioManager;
+
+ @Nullable
+ private AudioManagerEvents audioManagerEvents;
+ private AudioManagerState amState;
+ private int savedAudioMode = AudioManager.MODE_INVALID;
+ private boolean savedIsSpeakerPhoneOn;
+ private boolean savedIsMicrophoneMute;
+ private boolean hasWiredHeadset;
+
+ // Default audio device; speaker phone for video calls or earpiece for audio
+ // only calls.
+ private AudioDevice defaultAudioDevice;
+
+ // Contains the currently selected audio device.
+ // This device is changed automatically using a certain scheme where e.g.
+ // a wired headset "wins" over speaker phone. It is also possible for a
+ // user to explicitly select a device (and overrid any predefined scheme).
+ // See `userSelectedAudioDevice` for details.
+ private AudioDevice selectedAudioDevice;
+
+ // Contains the user-selected audio device which overrides the predefined
+ // selection scheme.
+ // TODO(henrika): always set to AudioDevice.NONE today. Add support for
+ // explicit selection based on choice by userSelectedAudioDevice.
+ private AudioDevice userSelectedAudioDevice;
+
+ // Contains speakerphone setting: auto, true or false
+ @Nullable private final String useSpeakerphone;
+
+ // Proximity sensor object. It measures the proximity of an object in cm
+ // relative to the view screen of a device and can therefore be used to
+ // assist device switching (close to ear <=> use headset earpiece if
+ // available, far from ear <=> use speaker phone).
+ @Nullable private AppRTCProximitySensor proximitySensor;
+
+ // Handles all tasks related to Bluetooth headset devices.
+ private final AppRTCBluetoothManager bluetoothManager;
+
+ // Contains a list of available audio devices. A Set collection is used to
+ // avoid duplicate elements.
+ private Set<AudioDevice> audioDevices = new HashSet<>();
+
+ // Broadcast receiver for wired headset intent broadcasts.
+ private BroadcastReceiver wiredHeadsetReceiver;
+
+ // Callback method for changes in audio focus.
+ @Nullable
+ private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener;
+
+ /**
+ * This method is called when the proximity sensor reports a state change,
+ * e.g. from "NEAR to FAR" or from "FAR to NEAR".
+ */
+ private void onProximitySensorChangedState() {
+ if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) {
+ return;
+ }
+
+ // The proximity sensor should only be activated when there are exactly two
+ // available audio devices.
+ if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
+ && audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
+ if (proximitySensor.sensorReportsNearState()) {
+ // Sensor reports that a "handset is being held up to a person's ear",
+ // or "something is covering the light sensor".
+ setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.EARPIECE);
+ } else {
+ // Sensor reports that a "handset is removed from a person's ear", or
+ // "the light sensor is no longer covered".
+ setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE);
+ }
+ }
+ }
+
+ /* Receiver which handles changes in wired headset availability. */
+ private class WiredHeadsetReceiver extends BroadcastReceiver {
+ private static final int STATE_UNPLUGGED = 0;
+ private static final int STATE_PLUGGED = 1;
+ private static final int HAS_NO_MIC = 0;
+ private static final int HAS_MIC = 1;
+
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ int state = intent.getIntExtra("state", STATE_UNPLUGGED);
+ int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
+ String name = intent.getStringExtra("name");
+ Log.d(TAG, "WiredHeadsetReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+ + "a=" + intent.getAction() + ", s="
+ + (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+ + (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+ + isInitialStickyBroadcast());
+ hasWiredHeadset = (state == STATE_PLUGGED);
+ updateAudioDeviceState();
+ }
+ }
+
+ /** Construction. */
+ static AppRTCAudioManager create(Context context) {
+ return new AppRTCAudioManager(context);
+ }
+
+ private AppRTCAudioManager(Context context) {
+ Log.d(TAG, "ctor");
+ ThreadUtils.checkIsOnMainThread();
+ apprtcContext = context;
+ audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
+ bluetoothManager = AppRTCBluetoothManager.create(context, this);
+ wiredHeadsetReceiver = new WiredHeadsetReceiver();
+ amState = AudioManagerState.UNINITIALIZED;
+
+ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
+ useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
+ context.getString(R.string.pref_speakerphone_default));
+ Log.d(TAG, "useSpeakerphone: " + useSpeakerphone);
+ if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) {
+ defaultAudioDevice = AudioDevice.EARPIECE;
+ } else {
+ defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
+ }
+
+ // Create and initialize the proximity sensor.
+ // Tablet devices (e.g. Nexus 7) does not support proximity sensors.
+ // Note that, the sensor will not be active until start() has been called.
+ proximitySensor = AppRTCProximitySensor.create(context,
+ // This method will be called each time a state change is detected.
+ // Example: user holds their hand over the device (closer than ~5 cm),
+ // or removes their hand from the device.
+ this ::onProximitySensorChangedState);
+
+ Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice);
+ AppRTCUtils.logDeviceInfo(TAG);
+ }
+
+  /**
+   * Starts audio management for a call. Saves the current audio state (mode,
+   * speakerphone, mic mute) so stop() can restore it, requests transient audio
+   * focus for voice calls, switches to MODE_IN_COMMUNICATION, unmutes the
+   * microphone, starts Bluetooth device detection and performs the initial
+   * audio-device selection. Must be called on the main thread; no-op (with an
+   * error log) if already RUNNING.
+   */
+  @SuppressWarnings("deprecation") // TODO(henrika): audioManager.requestAudioFocus() is deprecated.
+  public void start(AudioManagerEvents audioManagerEvents) {
+    Log.d(TAG, "start");
+    ThreadUtils.checkIsOnMainThread();
+    if (amState == AudioManagerState.RUNNING) {
+      Log.e(TAG, "AudioManager is already active");
+      return;
+    }
+    // TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED.
+
+    Log.d(TAG, "AudioManager starts...");
+    this.audioManagerEvents = audioManagerEvents;
+    amState = AudioManagerState.RUNNING;
+
+    // Store current audio state so we can restore it when stop() is called.
+    savedAudioMode = audioManager.getMode();
+    savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
+    savedIsMicrophoneMute = audioManager.isMicrophoneMute();
+    hasWiredHeadset = hasWiredHeadset();
+
+    // Create an AudioManager.OnAudioFocusChangeListener instance.
+    audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
+      // Called on the listener to notify if the audio focus for this listener has been changed.
+      // The `focusChange` value indicates whether the focus was gained, whether the focus was lost,
+      // and whether that loss is transient, or whether the new focus holder will hold it for an
+      // unknown amount of time.
+      // TODO(henrika): possibly extend support of handling audio-focus changes. Only contains
+      // logging for now.
+      @Override
+      public void onAudioFocusChange(int focusChange) {
+        final String typeOfChange;
+        switch (focusChange) {
+          case AudioManager.AUDIOFOCUS_GAIN:
+            typeOfChange = "AUDIOFOCUS_GAIN";
+            break;
+          case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
+            typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT";
+            break;
+          case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
+            typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE";
+            break;
+          case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
+            typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK";
+            break;
+          case AudioManager.AUDIOFOCUS_LOSS:
+            typeOfChange = "AUDIOFOCUS_LOSS";
+            break;
+          case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+            typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT";
+            break;
+          case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
+            typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK";
+            break;
+          default:
+            typeOfChange = "AUDIOFOCUS_INVALID";
+            break;
+        }
+        Log.d(TAG, "onAudioFocusChange: " + typeOfChange);
+      }
+    };
+
+    // Request audio playout focus (without ducking) and install listener for changes in focus.
+    int result = audioManager.requestAudioFocus(audioFocusChangeListener,
+        AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
+    if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+      Log.d(TAG, "Audio focus request granted for VOICE_CALL streams");
+    } else {
+      Log.e(TAG, "Audio focus request failed");
+    }
+
+    // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
+    // required to be in this mode when playout and/or recording starts for
+    // best possible VoIP performance.
+    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
+
+    // Always disable microphone mute during a WebRTC call.
+    setMicrophoneMute(false);
+
+    // Set initial device states.
+    userSelectedAudioDevice = AudioDevice.NONE;
+    selectedAudioDevice = AudioDevice.NONE;
+    audioDevices.clear();
+
+    // Initialize and start Bluetooth if a BT device is available or initiate
+    // detection of new (enabled) BT devices.
+    bluetoothManager.start();
+
+    // Do initial selection of audio device. This setting can later be changed
+    // either by adding/removing a BT or wired headset or by covering/uncovering
+    // the proximity sensor.
+    updateAudioDeviceState();
+
+    // Register receiver for broadcast intents related to adding/removing a
+    // wired headset.
+    registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
+    Log.d(TAG, "AudioManager started");
+  }
+
+  /**
+   * Stops audio management: unregisters the wired-headset receiver, stops
+   * Bluetooth, restores the audio state saved in start(), abandons audio focus
+   * and releases the proximity sensor. Must be called on the main thread;
+   * no-op (with an error log) unless currently RUNNING.
+   */
+  @SuppressWarnings("deprecation") // TODO(henrika): audioManager.abandonAudioFocus() is deprecated.
+  public void stop() {
+    Log.d(TAG, "stop");
+    ThreadUtils.checkIsOnMainThread();
+    if (amState != AudioManagerState.RUNNING) {
+      Log.e(TAG, "Trying to stop AudioManager in incorrect state: " + amState);
+      return;
+    }
+    amState = AudioManagerState.UNINITIALIZED;
+
+    unregisterReceiver(wiredHeadsetReceiver);
+
+    bluetoothManager.stop();
+
+    // Restore previously stored audio states.
+    setSpeakerphoneOn(savedIsSpeakerPhoneOn);
+    setMicrophoneMute(savedIsMicrophoneMute);
+    audioManager.setMode(savedAudioMode);
+
+    // Abandon audio focus. Gives the previous focus owner, if any, focus.
+    audioManager.abandonAudioFocus(audioFocusChangeListener);
+    audioFocusChangeListener = null;
+    Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams");
+
+    if (proximitySensor != null) {
+      proximitySensor.stop();
+      proximitySensor = null;
+    }
+
+    audioManagerEvents = null;
+    Log.d(TAG, "AudioManager stopped");
+  }
+
+  /**
+   * Changes selection of the currently active audio device by toggling the
+   * speakerphone route accordingly and recording the new selection.
+   */
+  private void setAudioDeviceInternal(AudioDevice device) {
+    Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")");
+    AppRTCUtils.assertIsTrue(audioDevices.contains(device));
+
+    // Only the loudspeaker route enables speakerphone mode; every other valid
+    // route (earpiece, wired headset, Bluetooth) requires it to be off.
+    if (device == AudioDevice.SPEAKER_PHONE) {
+      setSpeakerphoneOn(true);
+    } else if (device == AudioDevice.EARPIECE || device == AudioDevice.WIRED_HEADSET
+        || device == AudioDevice.BLUETOOTH) {
+      setSpeakerphoneOn(false);
+    } else {
+      Log.e(TAG, "Invalid audio device selection");
+    }
+    selectedAudioDevice = device;
+  }
+
+  /**
+   * Changes default audio device.
+   * TODO(henrika): add usage of this method in the AppRTCMobile client.
+   */
+  public void setDefaultAudioDevice(AudioDevice defaultDevice) {
+    ThreadUtils.checkIsOnMainThread();
+    if (defaultDevice == AudioDevice.SPEAKER_PHONE) {
+      defaultAudioDevice = defaultDevice;
+    } else if (defaultDevice == AudioDevice.EARPIECE) {
+      // Fall back to the loudspeaker on devices without an earpiece (tablets).
+      defaultAudioDevice = hasEarpiece() ? defaultDevice : AudioDevice.SPEAKER_PHONE;
+    } else {
+      Log.e(TAG, "Invalid default audio device selection");
+    }
+    Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")");
+    updateAudioDeviceState();
+  }
+
+  /**
+   * Changes selection of the currently active audio device.
+   * NOTE(review): if `device` is not currently available only an error is
+   * logged and the selection is still recorded; updateAudioDeviceState() is
+   * then relied on to reconcile it — confirm this is intended.
+   */
+  public void selectAudioDevice(AudioDevice device) {
+    ThreadUtils.checkIsOnMainThread();
+    if (!audioDevices.contains(device)) {
+      Log.e(TAG, "Can not select " + device + " from available " + audioDevices);
+    }
+    userSelectedAudioDevice = device;
+    updateAudioDeviceState();
+  }
+
+  /** Returns current set of available/selectable audio devices. */
+  public Set<AudioDevice> getAudioDevices() {
+    ThreadUtils.checkIsOnMainThread();
+    // Defensive copy wrapped as unmodifiable so callers cannot mutate internal state.
+    return Collections.unmodifiableSet(new HashSet<>(audioDevices));
+  }
+
+  /** Returns the currently selected audio device. */
+  public AudioDevice getSelectedAudioDevice() {
+    ThreadUtils.checkIsOnMainThread();
+    return selectedAudioDevice;
+  }
+
+  /** Helper method for receiver registration. */
+  private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
+    apprtcContext.registerReceiver(receiver, filter);
+  }
+
+  /** Helper method for unregistration of an existing receiver. */
+  private void unregisterReceiver(BroadcastReceiver receiver) {
+    apprtcContext.unregisterReceiver(receiver);
+  }
+
+  /** Sets the speaker phone mode. */
+  private void setSpeakerphoneOn(boolean on) {
+    // Only touch the hardware state when it actually differs from the request.
+    if (audioManager.isSpeakerphoneOn() != on) {
+      audioManager.setSpeakerphoneOn(on);
+    }
+  }
+
+  /** Sets the microphone mute state. */
+  private void setMicrophoneMute(boolean on) {
+    // Only touch the hardware state when it actually differs from the request.
+    if (audioManager.isMicrophoneMute() != on) {
+      audioManager.setMicrophoneMute(on);
+    }
+  }
+
+  /**
+   * Gets the current earpiece state. Telephony support is used as a proxy for
+   * the presence of an earpiece (tablets typically lack both).
+   */
+  private boolean hasEarpiece() {
+    return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
+  }
+
+  /**
+   * Checks whether a wired headset is connected or not.
+   * This is not a valid indication that audio playback is actually over
+   * the wired headset as audio routing depends on other conditions. We
+   * only use it as an early indicator (during initialization) of an attached
+   * wired headset.
+   */
+  @Deprecated // Marked deprecated because the pre-M path uses the deprecated isWiredHeadsetOn().
+  private boolean hasWiredHeadset() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      return audioManager.isWiredHeadsetOn();
+    } else {
+      // On M and later, enumerate attached audio devices and look for wired
+      // headsets or USB audio devices explicitly.
+      final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+      for (AudioDeviceInfo device : devices) {
+        final int type = device.getType();
+        if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
+          Log.d(TAG, "hasWiredHeadset: found wired headset");
+          return true;
+        } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
+          Log.d(TAG, "hasWiredHeadset: found USB audio device");
+          return true;
+        }
+      }
+      return false;
+    }
+  }
+
+  /**
+   * Updates list of possible audio devices and make new device selection.
+   * The decision order is: an established Bluetooth SCO link wins, then a wired
+   * headset, otherwise the configured default device. Notifies the registered
+   * AudioManagerEvents listener when either the selected device or the set of
+   * available devices changed. Must be called on the main thread.
+   * TODO(henrika): add unit test to verify all state transitions.
+   */
+  public void updateAudioDeviceState() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "--- updateAudioDeviceState: "
+        + "wired headset=" + hasWiredHeadset + ", "
+        + "BT state=" + bluetoothManager.getState());
+    Log.d(TAG, "Device status: "
+        + "available=" + audioDevices + ", "
+        + "selected=" + selectedAudioDevice + ", "
+        + "user selected=" + userSelectedAudioDevice);
+
+    // Check if any Bluetooth headset is connected. The internal BT state will
+    // change accordingly.
+    // TODO(henrika): perhaps wrap required state into BT manager.
+    if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
+        || bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
+        || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_DISCONNECTING) {
+      bluetoothManager.updateDevice();
+    }
+
+    // Update the set of available audio devices.
+    Set<AudioDevice> newAudioDevices = new HashSet<>();
+
+    if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
+        || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
+        || bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE) {
+      newAudioDevices.add(AudioDevice.BLUETOOTH);
+    }
+
+    if (hasWiredHeadset) {
+      // If a wired headset is connected, then it is the only possible option.
+      newAudioDevices.add(AudioDevice.WIRED_HEADSET);
+    } else {
+      // No wired headset, hence the audio-device list can contain speaker
+      // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
+      newAudioDevices.add(AudioDevice.SPEAKER_PHONE);
+      if (hasEarpiece()) {
+        newAudioDevices.add(AudioDevice.EARPIECE);
+      }
+    }
+    // Store state which is set to true if the device list has changed.
+    boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices);
+    // Update the existing audio device set.
+    audioDevices = newAudioDevices;
+    // Correct user selected audio devices if needed.
+    if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
+        && userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
+      // If BT is not available, it can't be the user selection.
+      userSelectedAudioDevice = AudioDevice.NONE;
+    }
+    if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) {
+      // If user selected speaker phone, but then plugged wired headset then make
+      // wired headset as user selected device.
+      userSelectedAudioDevice = AudioDevice.WIRED_HEADSET;
+    }
+    if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
+      // If user selected wired headset, but then unplugged wired headset then make
+      // speaker phone as user selected device.
+      userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE;
+    }
+
+    // Need to start Bluetooth if it is available and user either selected it explicitly or
+    // user did not select any output device.
+    boolean needBluetoothAudioStart =
+        bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
+        && (userSelectedAudioDevice == AudioDevice.NONE
+            || userSelectedAudioDevice == AudioDevice.BLUETOOTH);
+
+    // Need to stop Bluetooth audio if user selected different device and
+    // Bluetooth SCO connection is established or in the process.
+    boolean needBluetoothAudioStop =
+        (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
+            || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING)
+        && (userSelectedAudioDevice != AudioDevice.NONE
+            && userSelectedAudioDevice != AudioDevice.BLUETOOTH);
+
+    if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
+        || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
+        || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
+      Log.d(TAG, "Need BT audio: start=" + needBluetoothAudioStart + ", "
+          + "stop=" + needBluetoothAudioStop + ", "
+          + "BT state=" + bluetoothManager.getState());
+    }
+
+    // Start or stop Bluetooth SCO connection given states set earlier.
+    // Stop must be evaluated first so that a pending start is suppressed below.
+    if (needBluetoothAudioStop) {
+      bluetoothManager.stopScoAudio();
+      bluetoothManager.updateDevice();
+    }
+
+    if (needBluetoothAudioStart && !needBluetoothAudioStop) {
+      // Attempt to start Bluetooth SCO audio (takes a few second to start).
+      if (!bluetoothManager.startScoAudio()) {
+        // Remove BLUETOOTH from list of available devices since SCO failed.
+        audioDevices.remove(AudioDevice.BLUETOOTH);
+        audioDeviceSetUpdated = true;
+      }
+    }
+
+    // Update selected audio device.
+    final AudioDevice newAudioDevice;
+
+    if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
+      // If a Bluetooth is connected, then it should be used as output audio
+      // device. Note that it is not sufficient that a headset is available;
+      // an active SCO channel must also be up and running.
+      newAudioDevice = AudioDevice.BLUETOOTH;
+    } else if (hasWiredHeadset) {
+      // If a wired headset is connected, but Bluetooth is not, then wired headset is used as
+      // audio device.
+      newAudioDevice = AudioDevice.WIRED_HEADSET;
+    } else {
+      // No wired headset and no Bluetooth, hence the audio-device list can contain speaker
+      // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
+      // `defaultAudioDevice` contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
+      // depending on the user's selection.
+      newAudioDevice = defaultAudioDevice;
+    }
+    // Switch to new device but only if there has been any changes.
+    if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
+      // Do the required device switch.
+      setAudioDeviceInternal(newAudioDevice);
+      Log.d(TAG, "New device status: "
+          + "available=" + audioDevices + ", "
+          + "selected=" + newAudioDevice);
+      if (audioManagerEvents != null) {
+        // Notify a listening client that audio device has been changed.
+        audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices);
+      }
+    }
+    Log.d(TAG, "--- updateAudioDeviceState done");
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java
new file mode 100644
index 0000000000..e9077d8bd6
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java
@@ -0,0 +1,532 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.SuppressLint;
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothHeadset;
+import android.bluetooth.BluetoothProfile;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.media.AudioManager;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Process;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.util.List;
+import java.util.Set;
+import org.appspot.apprtc.util.AppRTCUtils;
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCBluetoothManager manages functions related to Bluetooth devices in the
+ * AppRTC demo.
+ */
+public class AppRTCBluetoothManager {
+  private static final String TAG = "AppRTCBluetoothManager";
+
+  // Timeout interval for starting or stopping audio to a Bluetooth SCO device.
+  private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000;
+  // Maximum number of SCO connection attempts.
+  private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2;
+
+  // Bluetooth connection state.
+  public enum State {
+    // Bluetooth is not available; no adapter or Bluetooth is off.
+    UNINITIALIZED,
+    // Bluetooth error happened when trying to start Bluetooth.
+    ERROR,
+    // Bluetooth proxy object for the Headset profile exists, but no connected headset devices,
+    // SCO is not started or disconnected.
+    HEADSET_UNAVAILABLE,
+    // Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset
+    // present, but SCO is not started or disconnected.
+    HEADSET_AVAILABLE,
+    // Bluetooth audio SCO connection with remote device is closing.
+    SCO_DISCONNECTING,
+    // Bluetooth audio SCO connection with remote device is initiated.
+    SCO_CONNECTING,
+    // Bluetooth audio SCO connection with remote device is established.
+    SCO_CONNECTED
+  }
+
+  private final Context apprtcContext;
+  // Owning audio manager; notified (via updateAudioDeviceState) on BT state changes.
+  private final AppRTCAudioManager apprtcAudioManager;
+  @Nullable
+  private final AudioManager audioManager;
+  // Main-thread handler used for the SCO start/stop timeout.
+  private final Handler handler;
+
+  // Package-private, presumably for test access — TODO confirm; counts SCO attempts
+  // and is compared against MAX_SCO_CONNECTION_ATTEMPTS.
+  int scoConnectionAttempts;
+  private State bluetoothState;
+  private final BluetoothProfile.ServiceListener bluetoothServiceListener;
+  @Nullable
+  private BluetoothAdapter bluetoothAdapter;
+  @Nullable
+  private BluetoothHeadset bluetoothHeadset;
+  @Nullable
+  private BluetoothDevice bluetoothDevice;
+  private final BroadcastReceiver bluetoothHeadsetReceiver;
+
+  // Runs when the Bluetooth timeout expires. We use that timeout after calling
+  // startScoAudio() or stopScoAudio() because we're not guaranteed to get a
+  // callback after those calls.
+  private final Runnable bluetoothTimeoutRunnable = new Runnable() {
+    @Override
+    public void run() {
+      bluetoothTimeout();
+    }
+  };
+
+  /**
+   * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been
+   * connected to or disconnected from the service.
+   */
+  private class BluetoothServiceListener implements BluetoothProfile.ServiceListener {
+    @Override
+    // Called to notify the client when the proxy object has been connected to the service.
+    // Once we have the profile proxy object, we can use it to monitor the state of the
+    // connection and perform other operations that are relevant to the headset profile.
+    public void onServiceConnected(int profile, BluetoothProfile proxy) {
+      // Ignore callbacks for other profiles or after stop() has reset the state.
+      if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
+        return;
+      }
+      Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState);
+      // Android only supports one connected Bluetooth Headset at a time.
+      bluetoothHeadset = (BluetoothHeadset) proxy;
+      updateAudioDeviceState();
+      Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState);
+    }
+
+    @Override
+    /** Notifies the client when the proxy object has been disconnected from the service. */
+    public void onServiceDisconnected(int profile) {
+      if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
+        return;
+      }
+      Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState);
+      // Tear down any SCO link and forget the headset/device before reporting.
+      stopScoAudio();
+      bluetoothHeadset = null;
+      bluetoothDevice = null;
+      bluetoothState = State.HEADSET_UNAVAILABLE;
+      updateAudioDeviceState();
+      Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState);
+    }
+  }
+
+  // Intent broadcast receiver which handles changes in Bluetooth device availability.
+  // Detects headset changes and Bluetooth SCO state changes.
+  private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver {
+    @Override
+    public void onReceive(Context context, Intent intent) {
+      // Ignore broadcasts delivered after stop() has reset the state machine.
+      if (bluetoothState == State.UNINITIALIZED) {
+        return;
+      }
+      final String action = intent.getAction();
+      // Change in connection state of the Headset profile. Note that the
+      // change does not tell us anything about whether we're streaming
+      // audio to BT over SCO. Typically received when user turns on a BT
+      // headset while audio is active using another audio device.
+      if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
+        final int state =
+            intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED);
+        Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
+            + "a=ACTION_CONNECTION_STATE_CHANGED, "
+            + "s=" + stateToString(state) + ", "
+            + "sb=" + isInitialStickyBroadcast() + ", "
+            + "BT state: " + bluetoothState);
+        if (state == BluetoothHeadset.STATE_CONNECTED) {
+          // Fresh headset connection: reset the retry budget and re-evaluate routing.
+          scoConnectionAttempts = 0;
+          updateAudioDeviceState();
+        } else if (state == BluetoothHeadset.STATE_CONNECTING) {
+          // No action needed.
+        } else if (state == BluetoothHeadset.STATE_DISCONNECTING) {
+          // No action needed.
+        } else if (state == BluetoothHeadset.STATE_DISCONNECTED) {
+          // Bluetooth is probably powered off during the call.
+          stopScoAudio();
+          updateAudioDeviceState();
+        }
+        // Change in the audio (SCO) connection state of the Headset profile.
+        // Typically received after call to startScoAudio() has finalized.
+      } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
+        final int state = intent.getIntExtra(
+            BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
+        Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
+            + "a=ACTION_AUDIO_STATE_CHANGED, "
+            + "s=" + stateToString(state) + ", "
+            + "sb=" + isInitialStickyBroadcast() + ", "
+            + "BT state: " + bluetoothState);
+        if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
+          // SCO link is up; cancel the watchdog started in startScoAudio().
+          cancelTimer();
+          if (bluetoothState == State.SCO_CONNECTING) {
+            Log.d(TAG, "+++ Bluetooth audio SCO is now connected");
+            bluetoothState = State.SCO_CONNECTED;
+            scoConnectionAttempts = 0;
+            updateAudioDeviceState();
+          } else {
+            Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED");
+          }
+        } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) {
+          Log.d(TAG, "+++ Bluetooth audio SCO is now connecting...");
+        } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
+          Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected");
+          if (isInitialStickyBroadcast()) {
+            Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast.");
+            return;
+          }
+          updateAudioDeviceState();
+        }
+      }
+      Log.d(TAG, "onReceive done: BT state=" + bluetoothState);
+    }
+  }
+
+  /** Factory method; see the protected constructor for details. */
+  static AppRTCBluetoothManager create(Context context, AppRTCAudioManager audioManager) {
+    Log.d(TAG, "create" + AppRTCUtils.getThreadInfo());
+    return new AppRTCBluetoothManager(context, audioManager);
+  }
+
+  /**
+   * Constructor is protected (rather than private) so tests can subclass and
+   * override the mock stubs below. Must run on the main thread; the handler is
+   * bound to the main looper so the SCO timeout fires there too.
+   */
+  protected AppRTCBluetoothManager(Context context, AppRTCAudioManager audioManager) {
+    Log.d(TAG, "ctor");
+    ThreadUtils.checkIsOnMainThread();
+    apprtcContext = context;
+    apprtcAudioManager = audioManager;
+    this.audioManager = getAudioManager(context);
+    bluetoothState = State.UNINITIALIZED;
+    bluetoothServiceListener = new BluetoothServiceListener();
+    bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver();
+    handler = new Handler(Looper.getMainLooper());
+  }
+
+  /** Returns the internal state. */
+  public State getState() {
+    ThreadUtils.checkIsOnMainThread();
+    return bluetoothState;
+  }
+
+  /**
+   * Activates components required to detect Bluetooth devices and to enable
+   * BT SCO (audio is routed via BT SCO) for the headset profile. The end
+   * state will be HEADSET_UNAVAILABLE but a state machine has started which
+   * will start a state change sequence where the final outcome depends on
+   * if/when the BT headset is enabled.
+   * Example of state change sequence when start() is called while BT device
+   * is connected and enabled:
+   * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE -->
+   * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO.
+   * Note that the AppRTCAudioManager is also involved in driving this state
+   * change.
+   */
+  public void start() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "start");
+    // NOTE(review): on Android 12+ (API 31) BLUETOOTH_CONNECT is the permission
+    // required for headset-profile access — confirm against the target SDK.
+    if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) {
+      Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission");
+      return;
+    }
+    if (bluetoothState != State.UNINITIALIZED) {
+      Log.w(TAG, "Invalid BT state");
+      return;
+    }
+    bluetoothHeadset = null;
+    bluetoothDevice = null;
+    scoConnectionAttempts = 0;
+    // Get a handle to the default local Bluetooth adapter.
+    bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
+    if (bluetoothAdapter == null) {
+      Log.w(TAG, "Device does not support Bluetooth");
+      return;
+    }
+    // Ensure that the device supports use of BT SCO audio for off call use cases.
+    if (!audioManager.isBluetoothScoAvailableOffCall()) {
+      Log.e(TAG, "Bluetooth SCO audio is not available off call");
+      return;
+    }
+    logBluetoothAdapterInfo(bluetoothAdapter);
+    // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and
+    // Hands-Free) proxy object and install a listener.
+    if (!getBluetoothProfileProxy(
+            apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) {
+      Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed");
+      return;
+    }
+    // Register receivers for BluetoothHeadset change notifications.
+    IntentFilter bluetoothHeadsetFilter = new IntentFilter();
+    // Register receiver for change in connection state of the Headset profile.
+    bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
+    // Register receiver for change in audio connection state of the Headset profile.
+    bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
+    registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter);
+    Log.d(TAG, "HEADSET profile state: "
+        + stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET)));
+    Log.d(TAG, "Bluetooth proxy for headset profile has started");
+    bluetoothState = State.HEADSET_UNAVAILABLE;
+    Log.d(TAG, "start done: BT state=" + bluetoothState);
+  }
+
+  /**
+   * Stops and closes all components related to Bluetooth audio: tears down the
+   * SCO link, unregisters the receiver, cancels the timeout and releases the
+   * headset profile proxy. Returns the state machine to UNINITIALIZED.
+   */
+  public void stop() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "stop: BT state=" + bluetoothState);
+    if (bluetoothAdapter == null) {
+      return;
+    }
+    // Stop BT SCO connection with remote device if needed.
+    stopScoAudio();
+    // Close down remaining BT resources.
+    if (bluetoothState == State.UNINITIALIZED) {
+      return;
+    }
+    unregisterReceiver(bluetoothHeadsetReceiver);
+    cancelTimer();
+    if (bluetoothHeadset != null) {
+      bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset);
+      bluetoothHeadset = null;
+    }
+    bluetoothAdapter = null;
+    bluetoothDevice = null;
+    bluetoothState = State.UNINITIALIZED;
+    Log.d(TAG, "stop done: BT state=" + bluetoothState);
+  }
+
+  /**
+   * Starts Bluetooth SCO connection with remote device.
+   * Note that the phone application always has the priority on the usage of the SCO connection
+   * for telephony. If this method is called while the phone is in call it will be ignored.
+   * Similarly, if a call is received or sent while an application is using the SCO connection,
+   * the connection will be lost for the application and NOT returned automatically when the call
+   * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a
+   * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO
+   * audio connection is established.
+   * TODO(henrika): should we add support for virtual voice call to BT headset also for JBMR2 and
+   * higher. It might be required to initiates a virtual voice call since many devices do not
+   * accept SCO audio without a "call".
+   *
+   * @return true if a SCO connection attempt was started (confirmation arrives
+   *     asynchronously via ACTION_AUDIO_STATE_CHANGED or the timeout), false if
+   *     the retry budget is exhausted or no headset is available.
+   */
+  public boolean startScoAudio() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "startSco: BT state=" + bluetoothState + ", "
+        + "attempts: " + scoConnectionAttempts + ", "
+        + "SCO is on: " + isScoOn());
+    if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) {
+      Log.e(TAG, "BT SCO connection fails - no more attempts");
+      return false;
+    }
+    if (bluetoothState != State.HEADSET_AVAILABLE) {
+      Log.e(TAG, "BT SCO connection fails - no headset available");
+      return false;
+    }
+    // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED.
+    Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED...");
+    // The SCO connection establishment can take several seconds, hence we cannot rely on the
+    // connection to be available when the method returns but instead register to receive the
+    // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED.
+    bluetoothState = State.SCO_CONNECTING;
+    audioManager.startBluetoothSco();
+    audioManager.setBluetoothScoOn(true);
+    scoConnectionAttempts++;
+    // Arm the watchdog in case no ACTION_AUDIO_STATE_CHANGED callback arrives.
+    startTimer();
+    Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", "
+        + "SCO is on: " + isScoOn());
+    return true;
+  }
+
+  /**
+   * Stops Bluetooth SCO connection with remote device. No-op unless a SCO
+   * connection is established or being established.
+   */
+  public void stopScoAudio() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", "
+        + "SCO is on: " + isScoOn());
+    if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
+      return;
+    }
+    // Disarm the watchdog before tearing the link down.
+    cancelTimer();
+    audioManager.stopBluetoothSco();
+    audioManager.setBluetoothScoOn(false);
+    bluetoothState = State.SCO_DISCONNECTING;
+    Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", "
+        + "SCO is on: " + isScoOn());
+  }
+
+  /**
+   * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
+   * Service via IPC) to update the list of connected devices for the HEADSET
+   * profile. The internal state will change to HEADSET_UNAVAILABLE or to
+   * HEADSET_AVAILABLE and `bluetoothDevice` will be mapped to the connected
+   * device if available.
+   */
+  public void updateDevice() {
+    if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
+      return;
+    }
+    Log.d(TAG, "updateDevice");
+    // Get connected devices for the headset profile. Returns the set of
+    // devices which are in state STATE_CONNECTED. The BluetoothDevice class
+    // is just a thin wrapper for a Bluetooth hardware address.
+    List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
+    if (devices.isEmpty()) {
+      bluetoothDevice = null;
+      bluetoothState = State.HEADSET_UNAVAILABLE;
+      Log.d(TAG, "No connected bluetooth headset");
+    } else {
+      // Always use first device in list. Android only supports one device.
+      bluetoothDevice = devices.get(0);
+      bluetoothState = State.HEADSET_AVAILABLE;
+      Log.d(TAG, "Connected bluetooth headset: "
+          + "name=" + bluetoothDevice.getName() + ", "
+          + "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice))
+          + ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice));
+    }
+    Log.d(TAG, "updateDevice done: BT state=" + bluetoothState);
+  }
+
+  /**
+   * Stubs for test mocks. These are protected so unit tests can subclass and
+   * replace interactions with the Android framework.
+   */
+  @Nullable
+  protected AudioManager getAudioManager(Context context) {
+    return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+  }
+
+  protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
+    apprtcContext.registerReceiver(receiver, filter);
+  }
+
+  protected void unregisterReceiver(BroadcastReceiver receiver) {
+    apprtcContext.unregisterReceiver(receiver);
+  }
+
+  // NOTE(review): dereferences `bluetoothAdapter`; only safe after start() has
+  // assigned it — confirm no other call sites exist.
+  protected boolean getBluetoothProfileProxy(
+      Context context, BluetoothProfile.ServiceListener listener, int profile) {
+    return bluetoothAdapter.getProfileProxy(context, listener, profile);
+  }
+
+  /**
+   * Returns true if this process holds `permission`. Checks against the
+   * supplied `context` (previously the parameter was ignored in favor of
+   * `apprtcContext`, which made it misleading for test mocks; the only caller
+   * passes `apprtcContext`, so behavior is unchanged).
+   */
+  protected boolean hasPermission(Context context, String permission) {
+    return context.checkPermission(permission, Process.myPid(), Process.myUid())
+        == PackageManager.PERMISSION_GRANTED;
+  }
+
+  /** Logs the state of the local Bluetooth adapter. Diagnostic only; no side effects. */
+  @SuppressLint("HardwareIds")
+  protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
+    Log.d(TAG, "BluetoothAdapter: "
+        + "enabled=" + localAdapter.isEnabled() + ", "
+        + "state=" + stateToString(localAdapter.getState()) + ", "
+        + "name=" + localAdapter.getName() + ", "
+        + "address=" + localAdapter.getAddress());
+    // Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter.
+    Set<BluetoothDevice> pairedDevices = localAdapter.getBondedDevices();
+    if (!pairedDevices.isEmpty()) {
+      Log.d(TAG, "paired devices:");
+      for (BluetoothDevice device : pairedDevices) {
+        Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress());
+      }
+    }
+  }
+
+  /** Ensures that the audio manager updates its list of available audio devices. */
+  private void updateAudioDeviceState() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "updateAudioDeviceState");
+    apprtcAudioManager.updateAudioDeviceState();
+  }
+
+  /**
+   * Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds.
+   * Acts as a watchdog for SCO start/stop, since no callback is guaranteed.
+   */
+  private void startTimer() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "startTimer");
+    handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
+  }
+
+  /** Cancels any outstanding timer tasks. */
+  private void cancelTimer() {
+    ThreadUtils.checkIsOnMainThread();
+    Log.d(TAG, "cancelTimer");
+    handler.removeCallbacks(bluetoothTimeoutRunnable);
+  }
+
+  /**
+   * Called when start of the BT SCO channel takes too long time. Usually
+   * happens when the BT device has been turned on during an ongoing call.
+   * Re-checks the actual SCO state: if audio turned out to be connected the
+   * state is corrected to SCO_CONNECTED, otherwise the attempt is aborted.
+   */
+  private void bluetoothTimeout() {
+    ThreadUtils.checkIsOnMainThread();
+    if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
+      return;
+    }
+    Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", "
+        + "attempts: " + scoConnectionAttempts + ", "
+        + "SCO is on: " + isScoOn());
+    if (bluetoothState != State.SCO_CONNECTING) {
+      return;
+    }
+    // Bluetooth SCO should be connecting; check the latest result.
+    boolean scoConnected = false;
+    List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
+    if (devices.size() > 0) {
+      bluetoothDevice = devices.get(0);
+      if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) {
+        Log.d(TAG, "SCO connected with " + bluetoothDevice.getName());
+        scoConnected = true;
+      } else {
+        Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName());
+      }
+    }
+    if (scoConnected) {
+      // We thought BT had timed out, but it's actually on; updating state.
+      bluetoothState = State.SCO_CONNECTED;
+      scoConnectionAttempts = 0;
+    } else {
+      // Give up and "cancel" our request by calling stopBluetoothSco().
+      Log.w(TAG, "BT failed to connect after timeout");
+      stopScoAudio();
+    }
+    updateAudioDeviceState();
+    Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState);
+  }
+
+  /** Checks whether audio uses Bluetooth SCO. Used for logging only. */
+  private boolean isScoOn() {
+    return audioManager.isBluetoothScoOn();
+  }
+
+  /**
+   * Converts BluetoothAdapter states into local string representations.
+   * Unrecognized values map to "INVALID".
+   */
+  private String stateToString(int state) {
+    if (state == BluetoothAdapter.STATE_DISCONNECTED) {
+      return "DISCONNECTED";
+    }
+    if (state == BluetoothAdapter.STATE_CONNECTED) {
+      return "CONNECTED";
+    }
+    if (state == BluetoothAdapter.STATE_CONNECTING) {
+      return "CONNECTING";
+    }
+    if (state == BluetoothAdapter.STATE_DISCONNECTING) {
+      return "DISCONNECTING";
+    }
+    if (state == BluetoothAdapter.STATE_OFF) {
+      return "OFF";
+    }
+    if (state == BluetoothAdapter.STATE_ON) {
+      return "ON";
+    }
+    if (state == BluetoothAdapter.STATE_TURNING_OFF) {
+      // Indicates the local Bluetooth adapter is turning off. Local clients should immediately
+      // attempt graceful disconnection of any remote links.
+      return "TURNING_OFF";
+    }
+    if (state == BluetoothAdapter.STATE_TURNING_ON) {
+      // Indicates the local Bluetooth adapter is turning on. However local clients should wait
+      // for STATE_ON before attempting to use the adapter.
+      return "TURNING_ON";
+    }
+    return "INVALID";
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
new file mode 100644
index 0000000000..d5b7b4338e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.SessionDescription;
+
+import java.util.List;
+
+/**
+ * AppRTCClient is the interface representing an AppRTC client.
+ */
+public interface AppRTCClient {
+  /**
+   * Struct holding the connection parameters of an AppRTC room.
+   */
+  class RoomConnectionParameters {
+    public final String roomUrl;
+    public final String roomId;
+    public final boolean loopback;
+    // May be null; the three-argument constructor passes null explicitly.
+    public final String urlParameters;
+    public RoomConnectionParameters(
+        String roomUrl, String roomId, boolean loopback, String urlParameters) {
+      this.roomUrl = roomUrl;
+      this.roomId = roomId;
+      this.loopback = loopback;
+      this.urlParameters = urlParameters;
+    }
+    // Convenience constructor for callers without extra URL parameters.
+    public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
+      this(roomUrl, roomId, loopback, null /* urlParameters */);
+    }
+  }
+
+  /**
+   * Asynchronously connect to an AppRTC room URL using supplied connection
+   * parameters. Once connection is established onConnectedToRoom()
+   * callback with room parameters is invoked.
+   */
+  void connectToRoom(RoomConnectionParameters connectionParameters);
+
+  /**
+   * Send offer SDP to the other participant.
+   */
+  void sendOfferSdp(final SessionDescription sdp);
+
+  /**
+   * Send answer SDP to the other participant.
+   */
+  void sendAnswerSdp(final SessionDescription sdp);
+
+  /**
+   * Send Ice candidate to the other participant.
+   */
+  void sendLocalIceCandidate(final IceCandidate candidate);
+
+  /**
+   * Send removed ICE candidates to the other participant.
+   */
+  void sendLocalIceCandidateRemovals(final IceCandidate[] candidates);
+
+  /**
+   * Disconnect from room.
+   */
+  void disconnectFromRoom();
+
+  /**
+   * Struct holding the signaling parameters of an AppRTC room.
+   */
+  class SignalingParameters {
+    public final List<PeerConnection.IceServer> iceServers;
+    // Presumably true when this client is expected to create the offer -
+    // NOTE(review): confirm against the signaling implementation's response parsing.
+    public final boolean initiator;
+    public final String clientId;
+    public final String wssUrl;
+    public final String wssPostUrl;
+    public final SessionDescription offerSdp;
+    public final List<IceCandidate> iceCandidates;
+
+    public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
+        String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
+        List<IceCandidate> iceCandidates) {
+      this.iceServers = iceServers;
+      this.initiator = initiator;
+      this.clientId = clientId;
+      this.wssUrl = wssUrl;
+      this.wssPostUrl = wssPostUrl;
+      this.offerSdp = offerSdp;
+      this.iceCandidates = iceCandidates;
+    }
+  }
+
+  /**
+   * Callback interface for messages delivered on signaling channel.
+   *
+   * <p>Methods are guaranteed to be invoked on the UI thread of `activity`.
+   */
+  interface SignalingEvents {
+    /**
+     * Callback fired once the room's signaling parameters
+     * SignalingParameters are extracted.
+     */
+    void onConnectedToRoom(final SignalingParameters params);
+
+    /**
+     * Callback fired once remote SDP is received.
+     */
+    void onRemoteDescription(final SessionDescription sdp);
+
+    /**
+     * Callback fired once remote Ice candidate is received.
+     */
+    void onRemoteIceCandidate(final IceCandidate candidate);
+
+    /**
+     * Callback fired once remote Ice candidate removals are received.
+     */
+    void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates);
+
+    /**
+     * Callback fired once channel is closed.
+     */
+    void onChannelClose();
+
+    /**
+     * Callback fired once channel error happened.
+     */
+    void onChannelError(final String description);
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
new file mode 100644
index 0000000000..604e2863d9
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.Context;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.os.Build;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import org.appspot.apprtc.util.AppRTCUtils;
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCProximitySensor manages functions related to the proximity sensor in
+ * the AppRTC demo.
+ * On most device, the proximity sensor is implemented as a boolean-sensor.
+ * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX
+ * value i.e. the LUX value of the light sensor is compared with a threshold.
+ * A LUX-value more than the threshold means the proximity sensor returns "FAR".
+ * Anything less than the threshold value and the sensor returns "NEAR".
+ */
+public class AppRTCProximitySensor implements SensorEventListener {
+  private static final String TAG = "AppRTCProximitySensor";
+
+  // This class should be created, started and stopped on one thread
+  // (e.g. the main thread). We use `threadChecker` to enforce this; every
+  // public method asserts it is invoked on the construction thread.
+  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+  private final Runnable onSensorStateListener;
+  private final SensorManager sensorManager;
+  // Lazily initialized by initDefaultSensor(); stays null when the device has no proximity sensor.
+  @Nullable private Sensor proximitySensor;
+  // Updated on every onSensorChanged() callback; queried via sensorReportsNearState().
+  private boolean lastStateReportIsNear;
+
+  /** Construction */
+  static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
+    return new AppRTCProximitySensor(context, sensorStateListener);
+  }
+
+  private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
+    Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
+    onSensorStateListener = sensorStateListener;
+    sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
+  }
+
+  /**
+   * Activate the proximity sensor. Also do initialization if called for the
+   * first time.
+   */
+  public boolean start() {
+    threadChecker.checkIsOnValidThread();
+    Log.d(TAG, "start" + AppRTCUtils.getThreadInfo());
+    if (!initDefaultSensor()) {
+      // Proximity sensor is not supported on this device.
+      return false;
+    }
+    sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
+    return true;
+  }
+
+  /** Deactivate the proximity sensor. */
+  public void stop() {
+    threadChecker.checkIsOnValidThread();
+    Log.d(TAG, "stop" + AppRTCUtils.getThreadInfo());
+    if (proximitySensor == null) {
+      // Never started, or no proximity sensor on this device - nothing to unregister.
+      return;
+    }
+    sensorManager.unregisterListener(this, proximitySensor);
+  }
+
+  /** Getter for last reported state. Set to true if "near" is reported. */
+  public boolean sensorReportsNearState() {
+    threadChecker.checkIsOnValidThread();
+    return lastStateReportIsNear;
+  }
+
+  @Override
+  public final void onAccuracyChanged(Sensor sensor, int accuracy) {
+    threadChecker.checkIsOnValidThread();
+    AppRTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY);
+    if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
+      Log.e(TAG, "The values returned by this sensor cannot be trusted");
+    }
+  }
+
+  @Override
+  public final void onSensorChanged(SensorEvent event) {
+    threadChecker.checkIsOnValidThread();
+    AppRTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY);
+    // As a best practice; do as little as possible within this method and
+    // avoid blocking.
+    // NEAR is any reading strictly below the sensor's maximum range (see class comment).
+    float distanceInCentimeters = event.values[0];
+    if (distanceInCentimeters < proximitySensor.getMaximumRange()) {
+      Log.d(TAG, "Proximity sensor => NEAR state");
+      lastStateReportIsNear = true;
+    } else {
+      Log.d(TAG, "Proximity sensor => FAR state");
+      lastStateReportIsNear = false;
+    }
+
+    // Report about new state to listening client. Client can then call
+    // sensorReportsNearState() to query the current state (NEAR or FAR).
+    if (onSensorStateListener != null) {
+      onSensorStateListener.run();
+    }
+
+    Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
+            + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+            + event.values[0]);
+  }
+
+  /**
+   * Get default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
+   * does not support this type of sensor and false will be returned in such
+   * cases.
+   */
+  private boolean initDefaultSensor() {
+    if (proximitySensor != null) {
+      return true;
+    }
+    proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
+    if (proximitySensor == null) {
+      return false;
+    }
+    logProximitySensorInfo();
+    return true;
+  }
+
+  /** Helper method for logging information about the proximity sensor. */
+  private void logProximitySensorInfo() {
+    if (proximitySensor == null) {
+      return;
+    }
+    StringBuilder info = new StringBuilder("Proximity sensor: ");
+    info.append("name=").append(proximitySensor.getName());
+    info.append(", vendor: ").append(proximitySensor.getVendor());
+    info.append(", power: ").append(proximitySensor.getPower());
+    info.append(", resolution: ").append(proximitySensor.getResolution());
+    info.append(", max range: ").append(proximitySensor.getMaximumRange());
+    info.append(", min delay: ").append(proximitySensor.getMinDelay());
+    // NOTE(review): getStringType() needs API 20; getMaxDelay()/getReportingMode()/
+    // isWakeUpSensor() need API 21 - confirm minSdkVersion covers these calls.
+    info.append(", type: ").append(proximitySensor.getStringType());
+    info.append(", max delay: ").append(proximitySensor.getMaxDelay());
+    info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
+    info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
+    Log.d(TAG, info.toString());
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
new file mode 100644
index 0000000000..eb5ee8289e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -0,0 +1,962 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.app.FragmentTransaction;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+import android.view.WindowManager.LayoutParams;
+import android.widget.Toast;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.lang.RuntimeException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import org.appspot.apprtc.AppRTCAudioManager.AudioDevice;
+import org.appspot.apprtc.AppRTCAudioManager.AudioManagerEvents;
+import org.appspot.apprtc.AppRTCClient.RoomConnectionParameters;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.PeerConnectionClient.DataChannelParameters;
+import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
+import org.webrtc.EglBase;
+import org.webrtc.FileVideoCapturer;
+import org.webrtc.IceCandidate;
+import org.webrtc.Logging;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RTCStatsReport;
+import org.webrtc.RendererCommon.ScalingType;
+import org.webrtc.ScreenCapturerAndroid;
+import org.webrtc.SessionDescription;
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoFileRenderer;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoSink;
+
+/**
+ * Activity for peer connection call setup, call waiting
+ * and call view.
+ */
+public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
+ PeerConnectionClient.PeerConnectionEvents,
+ CallFragment.OnCallEvents {
+  private static final String TAG = "CallRTCClient";
+  // NOTE(review): log tag differs from the class name ("CallRTCClient" vs CallActivity);
+  // kept as-is for log continuity.
+
+  // Intent extra keys understood by this activity; values are read in onCreate().
+  public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
+  public static final String EXTRA_URLPARAMETERS = "org.appspot.apprtc.URLPARAMETERS";
+  public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
+  public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
+  public static final String EXTRA_SCREENCAPTURE = "org.appspot.apprtc.SCREENCAPTURE";
+  public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
+  public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
+  public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
+  public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
+  // NOTE(review): the literal below says "appsopt" (upstream typo, not "appspot"); left
+  // unchanged because any sender using this published constant relies on the same bytes.
+  public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
+      "org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
+  public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
+  public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
+  public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
+  public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
+  public static final String EXTRA_FLEXFEC_ENABLED = "org.appspot.apprtc.FLEXFEC";
+  public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
+  public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
+  public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
+      "org.appspot.apprtc.NOAUDIOPROCESSING";
+  public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
+  public static final String EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED =
+      "org.appspot.apprtc.SAVE_INPUT_AUDIO_TO_FILE";
+  public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
+  public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
+  public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
+  public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
+  public static final String EXTRA_DISABLE_WEBRTC_AGC_AND_HPF =
+      "org.appspot.apprtc.DISABLE_WEBRTC_GAIN_CONTROL";
+  public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
+  public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
+  public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
+  public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
+  public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
+  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
+      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
+  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
+      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
+  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
+      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
+  public static final String EXTRA_USE_VALUES_FROM_INTENT =
+      "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
+  public static final String EXTRA_DATA_CHANNEL_ENABLED = "org.appspot.apprtc.DATA_CHANNEL_ENABLED";
+  public static final String EXTRA_ORDERED = "org.appspot.apprtc.ORDERED";
+  public static final String EXTRA_MAX_RETRANSMITS_MS = "org.appspot.apprtc.MAX_RETRANSMITS_MS";
+  public static final String EXTRA_MAX_RETRANSMITS = "org.appspot.apprtc.MAX_RETRANSMITS";
+  public static final String EXTRA_PROTOCOL = "org.appspot.apprtc.PROTOCOL";
+  public static final String EXTRA_NEGOTIATED = "org.appspot.apprtc.NEGOTIATED";
+  public static final String EXTRA_ID = "org.appspot.apprtc.ID";
+  public static final String EXTRA_ENABLE_RTCEVENTLOG = "org.appspot.apprtc.ENABLE_RTCEVENTLOG";
+
+  // Request code for the screen-capture consent dialog (see startScreenCapture()).
+  private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
+
+  // List of mandatory application permissions.
+  private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
+      "android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
+
+  // Peer connection statistics callback period in ms.
+  private static final int STAT_CALLBACK_PERIOD = 1000;
+
+  // Thread-safe VideoSink that forwards frames to a swappable target sink; frames
+  // arriving while no target is set are dropped (and logged).
+  private static class ProxyVideoSink implements VideoSink {
+    private VideoSink target;
+
+    @Override
+    synchronized public void onFrame(VideoFrame frame) {
+      if (target == null) {
+        Logging.d(TAG, "Dropping frame in proxy because target is null.");
+        return;
+      }
+
+      target.onFrame(frame);
+    }
+
+    synchronized public void setTarget(VideoSink target) {
+      this.target = target;
+    }
+  }
+
+  private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
+  private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
+  @Nullable private PeerConnectionClient peerConnectionClient;
+  @Nullable
+  private AppRTCClient appRtcClient;
+  @Nullable
+  private SignalingParameters signalingParameters;
+  @Nullable private AppRTCAudioManager audioManager;
+  @Nullable
+  private SurfaceViewRenderer pipRenderer;
+  @Nullable
+  private SurfaceViewRenderer fullscreenRenderer;
+  @Nullable
+  private VideoFileRenderer videoFileRenderer;
+  private final List<VideoSink> remoteSinks = new ArrayList<>();
+  private Toast logToast;
+  private boolean commandLineRun;
+  private boolean activityRunning;
+  private RoomConnectionParameters roomConnectionParameters;
+  @Nullable
+  private PeerConnectionParameters peerConnectionParameters;
+  private boolean connected;
+  private boolean isError;
+  private boolean callControlFragmentVisible = true;
+  private long callStartedTimeMs;
+  private boolean micEnabled = true;
+  private boolean screencaptureEnabled;
+  // Screen-capture consent result: set in onActivityResult(), consumed by createScreenCapturer().
+  // NOTE(review): these are static fields - presumably so the consent survives activity
+  // re-creation; confirm this is intentional.
+  private static Intent mediaProjectionPermissionResultData;
+  private static int mediaProjectionPermissionResultCode;
+  // True if local view is in the fullscreen renderer.
+  private boolean isSwappedFeeds;
+
+  // Controls
+  private CallFragment callFragment;
+  private HudFragment hudFragment;
+  private CpuMonitor cpuMonitor;
+
+  // Builds the whole call UI and networking stack from intent extras, then either
+  // starts the call directly or runs the screen-capture consent flow first.
+  @Override
+  // TODO(bugs.webrtc.org/8580): LayoutParams.FLAG_TURN_SCREEN_ON and
+  // LayoutParams.FLAG_SHOW_WHEN_LOCKED are deprecated.
+  @SuppressWarnings("deprecation")
+  public void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+    Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
+
+    // Set window styles for fullscreen-window size. Needs to be done before
+    // adding content.
+    requestWindowFeature(Window.FEATURE_NO_TITLE);
+    getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
+        | LayoutParams.FLAG_SHOW_WHEN_LOCKED | LayoutParams.FLAG_TURN_SCREEN_ON);
+    getWindow().getDecorView().setSystemUiVisibility(getSystemUiVisibility());
+    setContentView(R.layout.activity_call);
+
+    connected = false;
+    signalingParameters = null;
+
+    // Create UI controls.
+    pipRenderer = findViewById(R.id.pip_video_view);
+    fullscreenRenderer = findViewById(R.id.fullscreen_video_view);
+    callFragment = new CallFragment();
+    hudFragment = new HudFragment();
+
+    // Show/hide call control fragment on view click.
+    View.OnClickListener listener = new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        toggleCallControlFragmentVisibility();
+      }
+    };
+
+    // Swap feeds on pip view click.
+    pipRenderer.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        setSwappedFeeds(!isSwappedFeeds);
+      }
+    });
+
+    fullscreenRenderer.setOnClickListener(listener);
+    remoteSinks.add(remoteProxyRenderer);
+
+    final Intent intent = getIntent();
+    final EglBase eglBase = EglBase.create();
+
+    // Create video renderers.
+    pipRenderer.init(eglBase.getEglBaseContext(), null);
+    pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
+    String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
+
+    // When saveRemoteVideoToFile is set we save the video from the remote to a file.
+    if (saveRemoteVideoToFile != null) {
+      int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
+      int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
+      try {
+        videoFileRenderer = new VideoFileRenderer(
+            saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
+        remoteSinks.add(videoFileRenderer);
+      } catch (IOException e) {
+        throw new RuntimeException(
+            "Failed to open video file for output: " + saveRemoteVideoToFile, e);
+      }
+    }
+    fullscreenRenderer.init(eglBase.getEglBaseContext(), null);
+    fullscreenRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL);
+
+    pipRenderer.setZOrderMediaOverlay(true);
+    pipRenderer.setEnableHardwareScaler(true /* enabled */);
+    fullscreenRenderer.setEnableHardwareScaler(false /* enabled */);
+    // Start with local feed in fullscreen and swap it to the pip when the call is connected.
+    setSwappedFeeds(true /* isSwappedFeeds */);
+
+    // Check for mandatory permissions.
+    for (String permission : MANDATORY_PERMISSIONS) {
+      if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
+        logAndToast("Permission " + permission + " is not granted");
+        setResult(RESULT_CANCELED);
+        finish();
+        return;
+      }
+    }
+
+    // The room URI is mandatory; finish with a canceled result if it is missing.
+    Uri roomUri = intent.getData();
+    if (roomUri == null) {
+      logAndToast(getString(R.string.missing_url));
+      Log.e(TAG, "Didn't get any URL in intent!");
+      setResult(RESULT_CANCELED);
+      finish();
+      return;
+    }
+
+    // Get Intent parameters.
+    String roomId = intent.getStringExtra(EXTRA_ROOMID);
+    Log.d(TAG, "Room ID: " + roomId);
+    if (roomId == null || roomId.length() == 0) {
+      logAndToast(getString(R.string.missing_url));
+      Log.e(TAG, "Incorrect room ID in intent!");
+      setResult(RESULT_CANCELED);
+      finish();
+      return;
+    }
+
+    boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
+    boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
+
+    int videoWidth = intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0);
+    int videoHeight = intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0);
+
+    screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
+    // If capturing format is not specified for screencapture, use screen resolution.
+    if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
+      DisplayMetrics displayMetrics = getDisplayMetrics();
+      videoWidth = displayMetrics.widthPixels;
+      videoHeight = displayMetrics.heightPixels;
+    }
+    DataChannelParameters dataChannelParameters = null;
+    if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, false)) {
+      dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
+          intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
+          intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
+          intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
+    }
+    peerConnectionParameters =
+        new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
+            tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
+            intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
+            intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+            intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
+            intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
+            intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false),
+            intent.getBooleanExtra(EXTRA_ENABLE_RTCEVENTLOG, false), dataChannelParameters);
+    commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
+    int runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
+
+    Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");
+
+    // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
+    // standard WebSocketRTCClient.
+    if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
+      appRtcClient = new WebSocketRTCClient(this);
+    } else {
+      Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
+      appRtcClient = new DirectRTCClient(this);
+    }
+    // Create connection parameters.
+    String urlParameters = intent.getStringExtra(EXTRA_URLPARAMETERS);
+    roomConnectionParameters =
+        new RoomConnectionParameters(roomUri.toString(), roomId, loopback, urlParameters);
+
+    // Create CPU monitor
+    if (CpuMonitor.isSupported()) {
+      cpuMonitor = new CpuMonitor(this);
+      hudFragment.setCpuMonitor(cpuMonitor);
+    }
+
+    // Send intent arguments to fragments.
+    callFragment.setArguments(intent.getExtras());
+    hudFragment.setArguments(intent.getExtras());
+    // Activate call and HUD fragments and start the call.
+    FragmentTransaction ft = getFragmentManager().beginTransaction();
+    ft.add(R.id.call_fragment_container, callFragment);
+    ft.add(R.id.hud_fragment_container, hudFragment);
+    ft.commit();
+
+    // For command line execution run connection for <runTimeMs> and exit.
+    if (commandLineRun && runTimeMs > 0) {
+      (new Handler()).postDelayed(new Runnable() {
+        @Override
+        public void run() {
+          disconnect();
+        }
+      }, runTimeMs);
+    }
+
+    // Create peer connection client.
+    peerConnectionClient = new PeerConnectionClient(
+        getApplicationContext(), eglBase, peerConnectionParameters, CallActivity.this);
+    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+    if (loopback) {
+      options.networkIgnoreMask = 0;
+    }
+    peerConnectionClient.createPeerConnectionFactory(options);
+
+    if (screencaptureEnabled) {
+      startScreenCapture();
+    } else {
+      startCall();
+    }
+  }
+
+  /** Returns the real (full, undecorated) metrics of the default display. */
+  private DisplayMetrics getDisplayMetrics() {
+    DisplayMetrics displayMetrics = new DisplayMetrics();
+    WindowManager windowManager =
+        (WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
+    windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
+    return displayMetrics;
+  }
+
+  /** System UI flags for sticky-immersive fullscreen (navigation and status bar hidden). */
+  private static int getSystemUiVisibility() {
+    return View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN
+        | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
+  }
+
+  /** Launches the system consent dialog for screen capture; the result arrives in onActivityResult(). */
+  private void startScreenCapture() {
+    MediaProjectionManager mediaProjectionManager =
+        (MediaProjectionManager) getApplication().getSystemService(
+            Context.MEDIA_PROJECTION_SERVICE);
+    startActivityForResult(
+        mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
+  }
+
+  @Override
+  public void onActivityResult(int requestCode, int resultCode, Intent data) {
+    // Only the screen-capture consent result is expected here; the outcome is stashed
+    // in static fields and consumed later by createScreenCapturer().
+    // NOTE(review): super.onActivityResult() is not called - confirm no fragments rely on it.
+    if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE)
+      return;
+    mediaProjectionPermissionResultCode = resultCode;
+    mediaProjectionPermissionResultData = data;
+    startCall();
+  }
+
+  /** True when the Camera2 API is supported on this device and not disabled via EXTRA_CAMERA2. */
+  private boolean useCamera2() {
+    return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
+  }
+
+  /** Whether texture-based capture was requested via EXTRA_CAPTURETOTEXTURE_ENABLED. */
+  private boolean captureToTexture() {
+    return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
+  }
+
+  /** Picks a camera capturer, preferring a front-facing device; returns null if none can be created. */
+  private @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
+    final String[] deviceNames = enumerator.getDeviceNames();
+
+    // First, try to find front facing camera
+    Logging.d(TAG, "Looking for front facing cameras.");
+    for (String deviceName : deviceNames) {
+      if (enumerator.isFrontFacing(deviceName)) {
+        Logging.d(TAG, "Creating front facing camera capturer.");
+        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+        if (videoCapturer != null) {
+          return videoCapturer;
+        }
+      }
+    }
+
+    // Front facing camera not found, try something else
+    Logging.d(TAG, "Looking for other cameras.");
+    for (String deviceName : deviceNames) {
+      if (!enumerator.isFrontFacing(deviceName)) {
+        Logging.d(TAG, "Creating other camera capturer.");
+        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+        if (videoCapturer != null) {
+          return videoCapturer;
+        }
+      }
+    }
+
+    return null;
+  }
+
+  /**
+   * Creates a screen capturer from the consent result stored by onActivityResult();
+   * returns null (after reporting an error) when permission was denied.
+   */
+  private @Nullable VideoCapturer createScreenCapturer() {
+    if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
+      reportError("User didn't give permission to capture the screen.");
+      return null;
+    }
+    return new ScreenCapturerAndroid(
+        mediaProjectionPermissionResultData, new MediaProjection.Callback() {
+          @Override
+          public void onStop() {
+            reportError("User revoked permission to capture the screen.");
+          }
+        });
+  }
+
+  // Activity interfaces
+  @Override
+  public void onStop() {
+    super.onStop();
+    activityRunning = false;
+    // Don't stop the video when using screencapture to allow user to show other apps to the remote
+    // end.
+    if (peerConnectionClient != null && !screencaptureEnabled) {
+      peerConnectionClient.stopVideoSource();
+    }
+    // cpuMonitor only exists when CpuMonitor.isSupported() (see onCreate).
+    if (cpuMonitor != null) {
+      cpuMonitor.pause();
+    }
+  }
+
+  @Override
+  public void onStart() {
+    super.onStart();
+    activityRunning = true;
+    // Video is not paused for screencapture. See onStop.
+    if (peerConnectionClient != null && !screencaptureEnabled) {
+      peerConnectionClient.startVideoSource();
+    }
+    if (cpuMonitor != null) {
+      cpuMonitor.resume();
+    }
+  }
+
+  @Override
+  protected void onDestroy() {
+    // Remove the crash handler installed in onCreate before tearing the call down.
+    Thread.setDefaultUncaughtExceptionHandler(null);
+    disconnect();
+    if (logToast != null) {
+      logToast.cancel();
+    }
+    activityRunning = false;
+    super.onDestroy();
+  }
+
+  // CallFragment.OnCallEvents interface implementation.
+  /** Hang-up requested from the call UI: ends the call. */
+  @Override
+  public void onCallHangUp() {
+    disconnect();
+  }
+
+  @Override
+  public void onCameraSwitch() {
+    // No-op until the peer connection client has been created.
+    if (peerConnectionClient != null) {
+      peerConnectionClient.switchCamera();
+    }
+  }
+
+  @Override
+  public void onVideoScalingSwitch(ScalingType scalingType) {
+    // NOTE(review): fullscreenRenderer is declared @Nullable but dereferenced here without a
+    // check, unlike the peerConnectionClient guards in sibling callbacks - confirm UI events
+    // cannot fire before onCreate assigns it.
+    fullscreenRenderer.setScalingType(scalingType);
+  }
+
+  @Override
+  public void onCaptureFormatChange(int width, int height, int framerate) {
+    // Forwarded from the capture-quality slider; ignored until the client exists.
+    if (peerConnectionClient != null) {
+      peerConnectionClient.changeCaptureFormat(width, height, framerate);
+    }
+  }
+
+  @Override
+  public boolean onToggleMic() {
+    // Returns the resulting mic state; unchanged when the client is not yet created.
+    if (peerConnectionClient != null) {
+      micEnabled = !micEnabled;
+      peerConnectionClient.setAudioEnabled(micEnabled);
+    }
+    return micEnabled;
+  }
+
+  // Helper functions.
+  private void toggleCallControlFragmentVisibility() {
+    // Ignore clicks before the call is connected or after the fragment is detached.
+    if (!connected || !callFragment.isAdded()) {
+      return;
+    }
+    // Show/hide call control fragment
+    callControlFragmentVisible = !callControlFragmentVisible;
+    FragmentTransaction ft = getFragmentManager().beginTransaction();
+    if (callControlFragmentVisible) {
+      ft.show(callFragment);
+      ft.show(hudFragment);
+    } else {
+      ft.hide(callFragment);
+      ft.hide(hudFragment);
+    }
+    ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
+    ft.commit();
+  }
+
+  // Kick off the call: connect to the signaling room and start the audio
+  // manager that handles routing/device changes for the duration of the call.
+  private void startCall() {
+    if (appRtcClient == null) {
+      Log.e(TAG, "AppRTC client is not allocated for a call.");
+      return;
+    }
+    callStartedTimeMs = System.currentTimeMillis();
+
+    // Start room connection.
+    logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
+    appRtcClient.connectToRoom(roomConnectionParameters);
+
+    // Create an audio manager that will take care of audio routing,
+    // audio modes, audio device enumeration etc.
+    audioManager = AppRTCAudioManager.create(getApplicationContext());
+    // Store existing audio settings and change audio mode to
+    // MODE_IN_COMMUNICATION for best possible VoIP performance.
+    Log.d(TAG, "Starting the audio manager...");
+    audioManager.start(new AudioManagerEvents() {
+      // This method will be called each time the number of available audio
+      // devices has changed.
+      @Override
+      public void onAudioDeviceChanged(
+          AudioDevice audioDevice, Set<AudioDevice> availableAudioDevices) {
+        onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
+      }
+    });
+  }
+
+  // Should be called from UI thread
+  // Marks the call as fully established: enables periodic stats reporting and
+  // moves the remote feed to the fullscreen renderer.
+  private void callConnected() {
+    final long delta = System.currentTimeMillis() - callStartedTimeMs;
+    Log.i(TAG, "Call connected: delay=" + delta + "ms");
+    if (peerConnectionClient == null || isError) {
+      Log.w(TAG, "Call is connected in closed or error state");
+      return;
+    }
+    // Enable statistics callback.
+    peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
+    setSwappedFeeds(false /* isSwappedFeeds */);
+  }
+
+  // This method is called when the audio manager reports audio device change,
+  // e.g. from wired headset to speakerphone. Currently log-only.
+  private void onAudioManagerDevicesChanged(
+      final AudioDevice device, final Set<AudioDevice> availableDevices) {
+    Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
+            + "selected: " + device);
+    // TODO(henrika): add callback handler.
+  }
+
+  // Disconnect from remote resources, dispose of local resources, and exit.
+  // Safe to call multiple times: every field is null-checked and nulled out
+  // after release. Detaches video sinks first so no frames arrive on
+  // renderers that are being released.
+  private void disconnect() {
+    activityRunning = false;
+    remoteProxyRenderer.setTarget(null);
+    localProxyVideoSink.setTarget(null);
+    if (appRtcClient != null) {
+      appRtcClient.disconnectFromRoom();
+      appRtcClient = null;
+    }
+    if (pipRenderer != null) {
+      pipRenderer.release();
+      pipRenderer = null;
+    }
+    if (videoFileRenderer != null) {
+      videoFileRenderer.release();
+      videoFileRenderer = null;
+    }
+    if (fullscreenRenderer != null) {
+      fullscreenRenderer.release();
+      fullscreenRenderer = null;
+    }
+    if (peerConnectionClient != null) {
+      peerConnectionClient.close();
+      peerConnectionClient = null;
+    }
+    if (audioManager != null) {
+      audioManager.stop();
+      audioManager = null;
+    }
+    // Report success only if the call actually connected and no error occurred.
+    if (connected && !isError) {
+      setResult(RESULT_OK);
+    } else {
+      setResult(RESULT_CANCELED);
+    }
+    finish();
+  }
+
+ private void disconnectWithErrorMessage(final String errorMessage) {
+ if (commandLineRun || !activityRunning) {
+ Log.e(TAG, "Critical error: " + errorMessage);
+ disconnect();
+ } else {
+ new AlertDialog.Builder(this)
+ .setTitle(getText(R.string.channel_error_title))
+ .setMessage(errorMessage)
+ .setCancelable(false)
+ .setNeutralButton(R.string.ok,
+ new DialogInterface.OnClickListener() {
+ @Override
+ public void onClick(DialogInterface dialog, int id) {
+ dialog.cancel();
+ disconnect();
+ }
+ })
+ .create()
+ .show();
+ }
+ }
+
+  // Log `msg` and surface it to the user as a short toast, cancelling any
+  // toast that is still on screen so messages do not queue up.
+  private void logAndToast(String msg) {
+    Log.d(TAG, msg);
+    Toast previousToast = logToast;
+    if (previousToast != null) {
+      previousToast.cancel();
+    }
+    logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
+    logToast.show();
+  }
+
+ private void reportError(final String description) {
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ if (!isError) {
+ isError = true;
+ disconnectWithErrorMessage(description);
+ }
+ }
+ });
+ }
+
+  // Create the video capturer for the call, trying in order: a video file
+  // passed via intent (emulated camera), screen capture, the Camera2 API,
+  // then the legacy Camera1 API. Returns null (after reporting an error)
+  // if no capturer could be created.
+  private @Nullable VideoCapturer createVideoCapturer() {
+    final VideoCapturer videoCapturer;
+    String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
+    if (videoFileAsCamera != null) {
+      try {
+        videoCapturer = new FileVideoCapturer(videoFileAsCamera);
+      } catch (IOException e) {
+        reportError("Failed to open video file for emulated camera");
+        return null;
+      }
+    } else if (screencaptureEnabled) {
+      return createScreenCapturer();
+    } else if (useCamera2()) {
+      // Camera2 in this app requires texture-based capture.
+      if (!captureToTexture()) {
+        reportError(getString(R.string.camera2_texture_only_error));
+        return null;
+      }
+
+      Logging.d(TAG, "Creating capturer using camera2 API.");
+      videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
+    } else {
+      Logging.d(TAG, "Creating capturer using camera1 API.");
+      videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
+    }
+    if (videoCapturer == null) {
+      reportError("Failed to open camera");
+      return null;
+    }
+    return videoCapturer;
+  }
+
+  // Route the local and remote video streams to the fullscreen and
+  // picture-in-picture renderers. When `isSwappedFeeds` is true the local
+  // feed goes fullscreen; mirroring follows the local feed.
+  private void setSwappedFeeds(boolean isSwappedFeeds) {
+    Logging.d(TAG, "setSwappedFeeds: " + isSwappedFeeds);
+    this.isSwappedFeeds = isSwappedFeeds;
+    localProxyVideoSink.setTarget(isSwappedFeeds ? fullscreenRenderer : pipRenderer);
+    remoteProxyRenderer.setTarget(isSwappedFeeds ? pipRenderer : fullscreenRenderer);
+    fullscreenRenderer.setMirror(isSwappedFeeds);
+    pipRenderer.setMirror(!isSwappedFeeds);
+  }
+
+ // -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
+ // All callbacks are invoked from websocket signaling looper thread and
+ // are routed to UI thread.
+  // UI-thread half of onConnectedToRoom: creates the peer connection and,
+  // depending on whether we are the initiator, starts the offer/answer
+  // exchange using the parameters received from the room server.
+  private void onConnectedToRoomInternal(final SignalingParameters params) {
+    final long delta = System.currentTimeMillis() - callStartedTimeMs;
+
+    signalingParameters = params;
+    logAndToast("Creating peer connection, delay=" + delta + "ms");
+    VideoCapturer videoCapturer = null;
+    if (peerConnectionParameters.videoCallEnabled) {
+      videoCapturer = createVideoCapturer();
+    }
+    peerConnectionClient.createPeerConnection(
+        localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);
+
+    if (signalingParameters.initiator) {
+      logAndToast("Creating OFFER...");
+      // Create offer. Offer SDP will be sent to answering client in
+      // PeerConnectionEvents.onLocalDescription event.
+      peerConnectionClient.createOffer();
+    } else {
+      if (params.offerSdp != null) {
+        peerConnectionClient.setRemoteDescription(params.offerSdp);
+        logAndToast("Creating ANSWER...");
+        // Create answer. Answer SDP will be sent to offering client in
+        // PeerConnectionEvents.onLocalDescription event.
+        peerConnectionClient.createAnswer();
+      }
+      if (params.iceCandidates != null) {
+        // Add remote ICE candidates from room.
+        for (IceCandidate iceCandidate : params.iceCandidates) {
+          peerConnectionClient.addRemoteIceCandidate(iceCandidate);
+        }
+      }
+    }
+  }
+
+  // AppRTCSignalingEvents: room connection established. Signaling callbacks
+  // arrive on the websocket looper thread, so hop to the UI thread first.
+  @Override
+  public void onConnectedToRoom(final SignalingParameters params) {
+    runOnUiThread(() -> onConnectedToRoomInternal(params));
+  }
+
+  // AppRTCSignalingEvents: remote SDP (offer or answer) received from the
+  // signaling channel. Applied on the UI thread; when we are the answering
+  // side, an ANSWER is created in response.
+  @Override
+  public void onRemoteDescription(final SessionDescription desc) {
+    final long delta = System.currentTimeMillis() - callStartedTimeMs;
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        if (peerConnectionClient == null) {
+          // Fixed log-message typo ("non-initilized") to match the wording
+          // used by the other signaling callbacks in this class.
+          Log.e(TAG, "Received remote SDP for non-initialized peer connection.");
+          return;
+        }
+        logAndToast("Received remote " + desc.type + ", delay=" + delta + "ms");
+        peerConnectionClient.setRemoteDescription(desc);
+        if (!signalingParameters.initiator) {
+          logAndToast("Creating ANSWER...");
+          // Create answer. Answer SDP will be sent to offering client in
+          // PeerConnectionEvents.onLocalDescription event.
+          peerConnectionClient.createAnswer();
+        }
+      }
+    });
+  }
+
+  // AppRTCSignalingEvents: a remote ICE candidate arrived; forward it to the
+  // peer connection on the UI thread.
+  @Override
+  public void onRemoteIceCandidate(final IceCandidate candidate) {
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        if (peerConnectionClient == null) {
+          Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
+          return;
+        }
+        peerConnectionClient.addRemoteIceCandidate(candidate);
+      }
+    });
+  }
+
+  // AppRTCSignalingEvents: the remote side withdrew ICE candidates; remove
+  // them from the peer connection on the UI thread.
+  @Override
+  public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        if (peerConnectionClient == null) {
+          Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
+          return;
+        }
+        peerConnectionClient.removeRemoteIceCandidates(candidates);
+      }
+    });
+  }
+
+  // AppRTCSignalingEvents: the remote side closed the signaling channel, so
+  // finish the call on the UI thread.
+  @Override
+  public void onChannelClose() {
+    runOnUiThread(() -> {
+      logAndToast("Remote end hung up; dropping PeerConnection");
+      disconnect();
+    });
+  }
+
+  // AppRTCSignalingEvents: a signaling-channel error is treated as fatal.
+  @Override
+  public void onChannelError(final String description) {
+    reportError(description);
+  }
+
+ // -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
+ // Send local peer connection SDP and ICE candidates to remote party.
+ // All callbacks are invoked from peer connection client looper thread and
+ // are routed to UI thread.
+  // PeerConnectionEvents: local SDP is ready. Send it as offer or answer
+  // depending on our role, then apply the configured video bitrate cap.
+  @Override
+  public void onLocalDescription(final SessionDescription desc) {
+    final long delta = System.currentTimeMillis() - callStartedTimeMs;
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        if (appRtcClient != null) {
+          logAndToast("Sending " + desc.type + ", delay=" + delta + "ms");
+          if (signalingParameters.initiator) {
+            appRtcClient.sendOfferSdp(desc);
+          } else {
+            appRtcClient.sendAnswerSdp(desc);
+          }
+        }
+        // 0 or negative means "no cap configured".
+        if (peerConnectionParameters.videoMaxBitrate > 0) {
+          Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
+          peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
+        }
+      }
+    });
+  }
+
+  // PeerConnectionEvents: a local ICE candidate was gathered. Forward it to
+  // the signaling server unless we have already left the room.
+  @Override
+  public void onIceCandidate(final IceCandidate candidate) {
+    runOnUiThread(() -> {
+      if (appRtcClient != null) {
+        appRtcClient.sendLocalIceCandidate(candidate);
+      }
+    });
+  }
+
+  // PeerConnectionEvents: local ICE candidates were withdrawn; notify the
+  // signaling server unless we have already left the room.
+  @Override
+  public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        if (appRtcClient != null) {
+          appRtcClient.sendLocalIceCandidateRemovals(candidates);
+        }
+      }
+    });
+  }
+
+  // PeerConnectionEvents: ICE reached the connected state. Informational
+  // only; the call is considered established on DTLS connect (onConnected).
+  @Override
+  public void onIceConnected() {
+    final long delta = System.currentTimeMillis() - callStartedTimeMs;
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        logAndToast("ICE connected, delay=" + delta + "ms");
+      }
+    });
+  }
+
+  // PeerConnectionEvents: ICE dropped. Informational only; teardown happens
+  // on DTLS disconnect (onDisconnected).
+  @Override
+  public void onIceDisconnected() {
+    runOnUiThread(() -> logAndToast("ICE disconnected"));
+  }
+
+  // PeerConnectionEvents: DTLS is up — the call is now established. Marks
+  // the call connected and finalizes call setup on the UI thread.
+  @Override
+  public void onConnected() {
+    final long delta = System.currentTimeMillis() - callStartedTimeMs;
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        logAndToast("DTLS connected, delay=" + delta + "ms");
+        connected = true;
+        callConnected();
+      }
+    });
+  }
+
+  // PeerConnectionEvents: DTLS went down — treat the call as over and tear
+  // everything down on the UI thread.
+  @Override
+  public void onDisconnected() {
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        logAndToast("DTLS disconnected");
+        connected = false;
+        disconnect();
+      }
+    });
+  }
+
+  // PeerConnectionEvents: intentionally empty — all teardown is handled in
+  // disconnect().
+  @Override
+  public void onPeerConnectionClosed() {}
+
+  // PeerConnectionEvents: periodic stats report (enabled in callConnected).
+  // Forward it to the HUD while the call is healthy.
+  @Override
+  public void onPeerConnectionStatsReady(final RTCStatsReport report) {
+    runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        if (!isError && connected) {
+          hudFragment.updateEncoderStatistics(report);
+        }
+      }
+    });
+  }
+
+  // PeerConnectionEvents: a peer-connection error is treated as fatal.
+  @Override
+  public void onPeerConnectionError(final String description) {
+    reportError(description);
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
new file mode 100644
index 0000000000..0d8bdaa06f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.SeekBar;
+import android.widget.TextView;
+
+import org.webrtc.RendererCommon.ScalingType;
+
+/**
+ * Fragment for call control.
+ */
+public class CallFragment extends Fragment {
+  private TextView contactView;
+  private ImageButton cameraSwitchButton;
+  private ImageButton videoScalingButton;
+  private ImageButton toggleMuteButton;
+  private TextView captureFormatText;
+  private SeekBar captureFormatSlider;
+  // Host activity, attached in onAttach; receives all button events.
+  private OnCallEvents callEvents;
+  private ScalingType scalingType;
+  private boolean videoCallEnabled = true;
+
+  /**
+   * Call control interface for container activity.
+   */
+  public interface OnCallEvents {
+    void onCallHangUp();
+    void onCameraSwitch();
+    void onVideoScalingSwitch(ScalingType scalingType);
+    void onCaptureFormatChange(int width, int height, int framerate);
+    boolean onToggleMic();
+  }
+
+  // Inflates the call-control layout and wires each button to the
+  // corresponding OnCallEvents callback.
+  @Override
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+    View controlView = inflater.inflate(R.layout.fragment_call, container, false);
+
+    // Create UI controls.
+    contactView = controlView.findViewById(R.id.contact_name_call);
+    ImageButton disconnectButton = controlView.findViewById(R.id.button_call_disconnect);
+    cameraSwitchButton = controlView.findViewById(R.id.button_call_switch_camera);
+    videoScalingButton = controlView.findViewById(R.id.button_call_scaling_mode);
+    toggleMuteButton = controlView.findViewById(R.id.button_call_toggle_mic);
+    captureFormatText = controlView.findViewById(R.id.capture_format_text_call);
+    captureFormatSlider = controlView.findViewById(R.id.capture_format_slider_call);
+
+    // Add buttons click events.
+    disconnectButton.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        callEvents.onCallHangUp();
+      }
+    });
+
+    cameraSwitchButton.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        callEvents.onCameraSwitch();
+      }
+    });
+
+    videoScalingButton.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        // Toggle between FILL and FIT; the button icon shows the mode that a
+        // press will switch to.
+        if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
+          videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
+          scalingType = ScalingType.SCALE_ASPECT_FIT;
+        } else {
+          videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
+          scalingType = ScalingType.SCALE_ASPECT_FILL;
+        }
+        callEvents.onVideoScalingSwitch(scalingType);
+      }
+    });
+    scalingType = ScalingType.SCALE_ASPECT_FILL;
+
+    toggleMuteButton.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        boolean enabled = callEvents.onToggleMic();
+        // Dim the button while the mic is muted.
+        toggleMuteButton.setAlpha(enabled ? 1.0f : 0.3f);
+      }
+    });
+
+    return controlView;
+  }
+
+  // Reads the room id and feature flags from the fragment arguments and
+  // adjusts which controls are visible.
+  @Override
+  public void onStart() {
+    super.onStart();
+
+    boolean captureSliderEnabled = false;
+    Bundle args = getArguments();
+    if (args != null) {
+      String contactName = args.getString(CallActivity.EXTRA_ROOMID);
+      contactView.setText(contactName);
+      videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
+      // The quality slider only makes sense for video calls.
+      captureSliderEnabled = videoCallEnabled
+          && args.getBoolean(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, false);
+    }
+    if (!videoCallEnabled) {
+      cameraSwitchButton.setVisibility(View.INVISIBLE);
+    }
+    if (captureSliderEnabled) {
+      captureFormatSlider.setOnSeekBarChangeListener(
+          new CaptureQualityController(captureFormatText, callEvents));
+    } else {
+      captureFormatText.setVisibility(View.GONE);
+      captureFormatSlider.setVisibility(View.GONE);
+    }
+  }
+
+  // TODO(sakal): Replace with onAttach(Context) once we only support API level 23+.
+  @SuppressWarnings("deprecation")
+  @Override
+  public void onAttach(Activity activity) {
+    super.onAttach(activity);
+    // The hosting activity must implement OnCallEvents.
+    callEvents = (OnCallEvents) activity;
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
new file mode 100644
index 0000000000..8a783eca9c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.widget.SeekBar;
+import android.widget.TextView;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+/**
+ * Control capture format based on a seekbar listener.
+ */
+public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
+  // Candidate capture formats, from highest to lowest resolution. The last
+  // CaptureFormat argument is max framerate in millihertz (30000 = 30 fps).
+  private final List<CaptureFormat> formats =
+      Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
+          new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
+          new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
+  // Prioritize framerate below this threshold and resolution above the threshold.
+  private static final int FRAMERATE_THRESHOLD = 15;
+  private TextView captureFormatText;
+  private CallFragment.OnCallEvents callEvents;
+  // Currently selected format; pushed to the call when the user releases the
+  // slider (onStopTrackingTouch).
+  private int width;
+  private int height;
+  private int framerate;
+  // Target capture bandwidth in millipixels/second derived from the slider.
+  private double targetBandwidth;
+
+  public CaptureQualityController(
+      TextView captureFormatText, CallFragment.OnCallEvents callEvents) {
+    this.captureFormatText = captureFormatText;
+    this.callEvents = callEvents;
+  }
+
+  // Orders formats by desirability at the current targetBandwidth: once both
+  // reach FRAMERATE_THRESHOLD fps, the larger resolution wins; below that,
+  // the higher achievable framerate wins.
+  private final Comparator<CaptureFormat> compareFormats = new Comparator<CaptureFormat>() {
+    @Override
+    public int compare(CaptureFormat first, CaptureFormat second) {
+      int firstFps = calculateFramerate(targetBandwidth, first);
+      int secondFps = calculateFramerate(targetBandwidth, second);
+
+      if ((firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD)
+          || firstFps == secondFps) {
+        // Compare resolution.
+        return first.width * first.height - second.width * second.height;
+      } else {
+        // Compare fps.
+        return firstFps - secondFps;
+      }
+    }
+  };
+
+  // Maps the slider position (0-100) to a capture format. Position 0 mutes
+  // capture; otherwise the bandwidth budget grows log-scale with progress and
+  // the best matching format is chosen.
+  @Override
+  public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+    if (progress == 0) {
+      width = 0;
+      height = 0;
+      framerate = 0;
+      captureFormatText.setText(R.string.muted);
+      return;
+    }
+
+    // Extract max bandwidth (in millipixels / second).
+    long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
+    for (CaptureFormat format : formats) {
+      maxCaptureBandwidth =
+          Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
+    }
+
+    // Fraction between 0 and 1.
+    double bandwidthFraction = (double) progress / 100.0;
+    // Make a log-scale transformation, still between 0 and 1.
+    final double kExpConstant = 3.0;
+    bandwidthFraction =
+        (Math.exp(kExpConstant * bandwidthFraction) - 1) / (Math.exp(kExpConstant) - 1);
+    targetBandwidth = bandwidthFraction * maxCaptureBandwidth;
+
+    // Choose the best format given a target bandwidth.
+    final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
+    width = bestFormat.width;
+    height = bestFormat.height;
+    framerate = calculateFramerate(targetBandwidth, bestFormat);
+    captureFormatText.setText(
+        String.format(captureFormatText.getContext().getString(R.string.format_description), width,
+            height, framerate));
+  }
+
+  @Override
+  public void onStartTrackingTouch(SeekBar seekBar) {}
+
+  // Apply the selected format only when the user lets go of the slider, to
+  // avoid reconfiguring the capturer on every tick.
+  @Override
+  public void onStopTrackingTouch(SeekBar seekBar) {
+    callEvents.onCaptureFormatChange(width, height, framerate);
+  }
+
+  // Return the highest frame rate possible based on bandwidth and format.
+  // Inputs are in millipixel/millihertz units; the result is in plain fps.
+  private int calculateFramerate(double bandwidth, CaptureFormat format) {
+    return (int) Math.round(
+        Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
+        / 1000.0);
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
new file mode 100644
index 0000000000..7206c88498
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
@@ -0,0 +1,666 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.SharedPreferences;
+import android.content.pm.PackageInfo;
+import android.content.pm.PackageManager;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.preference.PreferenceManager;
+import android.util.Log;
+import android.view.ContextMenu;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuItem;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.view.inputmethod.EditorInfo;
+import android.webkit.URLUtil;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.EditText;
+import android.widget.ImageButton;
+import android.widget.ListView;
+import android.widget.TextView;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Random;
+import org.json.JSONArray;
+import org.json.JSONException;
+
+/**
+ * Handles the initial setup where the user selects which room to join.
+ */
+public class ConnectActivity extends Activity {
+ private static final String TAG = "ConnectActivity";
+ private static final int CONNECTION_REQUEST = 1;
+ private static final int PERMISSION_REQUEST = 2;
+ private static final int REMOVE_FAVORITE_INDEX = 0;
+ private static boolean commandLineRun;
+
+ private ImageButton addFavoriteButton;
+ private EditText roomEditText;
+ private ListView roomListView;
+ private SharedPreferences sharedPref;
+ private String keyprefResolution;
+ private String keyprefFps;
+ private String keyprefVideoBitrateType;
+ private String keyprefVideoBitrateValue;
+ private String keyprefAudioBitrateType;
+ private String keyprefAudioBitrateValue;
+ private String keyprefRoomServerUrl;
+ private String keyprefRoom;
+ private String keyprefRoomList;
+ private ArrayList<String> roomList;
+ private ArrayAdapter<String> adapter;
+
+  // Builds the room-selection UI, resolves all preference keys used later,
+  // and kicks off the runtime-permission flow.
+  @Override
+  public void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+
+    // Get setting keys.
+    PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
+    sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
+    keyprefResolution = getString(R.string.pref_resolution_key);
+    keyprefFps = getString(R.string.pref_fps_key);
+    keyprefVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
+    keyprefVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
+    keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
+    keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
+    keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
+    keyprefRoom = getString(R.string.pref_room_key);
+    keyprefRoomList = getString(R.string.pref_room_list_key);
+
+    setContentView(R.layout.activity_connect);
+
+    roomEditText = findViewById(R.id.room_edittext);
+    // Pressing "done" on the keyboard behaves like tapping the favorite button.
+    roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
+      @Override
+      public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
+        if (i == EditorInfo.IME_ACTION_DONE) {
+          addFavoriteButton.performClick();
+          return true;
+        }
+        return false;
+      }
+    });
+    roomEditText.requestFocus();
+
+    roomListView = findViewById(R.id.room_listview);
+    roomListView.setEmptyView(findViewById(android.R.id.empty));
+    roomListView.setOnItemClickListener(roomListClickListener);
+    // Long-press menu on favorites (see onCreateContextMenu).
+    registerForContextMenu(roomListView);
+    ImageButton connectButton = findViewById(R.id.connect_button);
+    connectButton.setOnClickListener(connectListener);
+    addFavoriteButton = findViewById(R.id.add_favorite_button);
+    addFavoriteButton.setOnClickListener(addFavoriteListener);
+
+    requestPermissions();
+  }
+
+  // Inflates the action-bar menu (settings / loopback entries).
+  @Override
+  public boolean onCreateOptionsMenu(Menu menu) {
+    getMenuInflater().inflate(R.menu.connect_menu, menu);
+    return true;
+  }
+
+  // Builds the long-press context menu for a favorite room; item ids are the
+  // indices into R.array.roomListContextMenu (see onContextItemSelected).
+  @Override
+  public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
+    if (v.getId() == R.id.room_listview) {
+      AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo;
+      menu.setHeaderTitle(roomList.get(info.position));
+      String[] menuItems = getResources().getStringArray(R.array.roomListContextMenu);
+      for (int i = 0; i < menuItems.length; i++) {
+        menu.add(Menu.NONE, i, i, menuItems[i]);
+      }
+    } else {
+      super.onCreateContextMenu(menu, v, menuInfo);
+    }
+  }
+
+  // Handles the room-list context menu; currently only "remove favorite".
+  @Override
+  public boolean onContextItemSelected(MenuItem item) {
+    if (item.getItemId() == REMOVE_FAVORITE_INDEX) {
+      AdapterView.AdapterContextMenuInfo info =
+          (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
+      roomList.remove(info.position);
+      adapter.notifyDataSetChanged();
+      return true;
+    }
+
+    return super.onContextItemSelected(item);
+  }
+
+  // Action-bar handling: open settings, or start a loopback call with a
+  // random room (connectToRoom generates the id when loopback is true).
+  @Override
+  public boolean onOptionsItemSelected(MenuItem item) {
+    // Handle presses on the action bar items.
+    if (item.getItemId() == R.id.action_settings) {
+      Intent intent = new Intent(this, SettingsActivity.class);
+      startActivity(intent);
+      return true;
+    } else if (item.getItemId() == R.id.action_loopback) {
+      connectToRoom(null, false, true, false, 0);
+      return true;
+    } else {
+      return super.onOptionsItemSelected(item);
+    }
+  }
+
+  // Persist the room box contents and the favorites list (as a JSON array)
+  // so they survive the activity being backgrounded or killed.
+  @Override
+  public void onPause() {
+    super.onPause();
+    String room = roomEditText.getText().toString();
+    String roomListJson = new JSONArray(roomList).toString();
+    SharedPreferences.Editor editor = sharedPref.edit();
+    editor.putString(keyprefRoom, room);
+    editor.putString(keyprefRoomList, roomListJson);
+    // apply() updates the in-memory preferences immediately and commits to
+    // disk asynchronously, avoiding blocking the UI thread on file I/O
+    // (Android's ApplySharedPref lint flags commit() here).
+    editor.apply();
+  }
+
+  // Restores the last room and the favorites list from shared preferences
+  // and (re)binds the list adapter.
+  @Override
+  public void onResume() {
+    super.onResume();
+    String room = sharedPref.getString(keyprefRoom, "");
+    roomEditText.setText(room);
+    roomList = new ArrayList<>();
+    String roomListJson = sharedPref.getString(keyprefRoomList, null);
+    if (roomListJson != null) {
+      try {
+        JSONArray jsonArray = new JSONArray(roomListJson);
+        for (int i = 0; i < jsonArray.length(); i++) {
+          roomList.add(jsonArray.get(i).toString());
+        }
+      } catch (JSONException e) {
+        // Corrupt JSON: fall back to an empty favorites list.
+        Log.e(TAG, "Failed to load room list: " + e.toString());
+      }
+    }
+    adapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, roomList);
+    roomListView.setAdapter(adapter);
+    if (adapter.getCount() > 0) {
+      roomListView.requestFocus();
+      roomListView.setItemChecked(0, true);
+    }
+  }
+
+  // For command-line (intent-driven) runs, propagate the call activity's
+  // result and finish so the harness can read the outcome.
+  @Override
+  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+    if (requestCode == CONNECTION_REQUEST && commandLineRun) {
+      Log.d(TAG, "Return: " + resultCode);
+      setResult(resultCode);
+      commandLineRun = false;
+      finish();
+    }
+  }
+
+
+  // Result of the runtime-permission request: if anything is still missing,
+  // offer to retry; either way the app proceeds via onPermissionsGranted.
+  @Override
+  public void onRequestPermissionsResult(
+      int requestCode, String[] permissions, int[] grantResults) {
+    if (requestCode == PERMISSION_REQUEST) {
+      String[] missingPermissions = getMissingPermissions();
+      if (missingPermissions.length != 0) {
+        // User didn't grant all the permissions. Warn that the application might not work
+        // correctly.
+        new AlertDialog.Builder(this)
+            .setMessage(R.string.missing_permissions_try_again)
+            .setPositiveButton(R.string.yes,
+                (dialog, id) -> {
+                  // User wants to try giving the permissions again.
+                  dialog.cancel();
+                  requestPermissions();
+                })
+            .setNegativeButton(R.string.no,
+                (dialog, id) -> {
+                  // User doesn't want to give the permissions.
+                  dialog.cancel();
+                  onPermissionsGranted();
+                })
+            .show();
+      } else {
+        // All permissions granted.
+        onPermissionsGranted();
+      }
+    }
+  }
+
+  // Called once the permission flow has finished (granted or declined).
+  // When the app was launched via an implicit VIEW intent, connects straight
+  // to the last-used room with parameters taken from the intent extras.
+  private void onPermissionsGranted() {
+    // If an implicit VIEW intent is launching the app, go directly to that URL.
+    final Intent intent = getIntent();
+    if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
+      boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
+      int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
+      boolean useValuesFromIntent =
+          intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
+      String room = sharedPref.getString(keyprefRoom, "");
+      connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
+    }
+  }
+
+  // Requests any permissions the manifest declares but the user has not yet
+  // granted; on pre-M devices (no runtime permissions) proceeds immediately.
+  @TargetApi(Build.VERSION_CODES.M)
+  private void requestPermissions() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      // Dynamic permissions are not required before Android M.
+      onPermissionsGranted();
+      return;
+    }
+
+    String[] missingPermissions = getMissingPermissions();
+    if (missingPermissions.length != 0) {
+      requestPermissions(missingPermissions, PERMISSION_REQUEST);
+    } else {
+      onPermissionsGranted();
+    }
+  }
+
+  // Returns the manifest-declared permissions that are not currently granted.
+  // Returns an empty array on pre-M devices or if the package info cannot be
+  // read (best effort).
+  @TargetApi(Build.VERSION_CODES.M)
+  private String[] getMissingPermissions() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      return new String[0];
+    }
+
+    PackageInfo info;
+    try {
+      info = getPackageManager().getPackageInfo(getPackageName(), PackageManager.GET_PERMISSIONS);
+    } catch (PackageManager.NameNotFoundException e) {
+      Log.w(TAG, "Failed to retrieve permissions.");
+      return new String[0];
+    }
+
+    if (info.requestedPermissions == null) {
+      Log.w(TAG, "No requested permissions.");
+      return new String[0];
+    }
+
+    ArrayList<String> missingPermissions = new ArrayList<>();
+    for (int i = 0; i < info.requestedPermissions.length; i++) {
+      // requestedPermissionsFlags[i] describes the grant state of
+      // requestedPermissions[i].
+      if ((info.requestedPermissionsFlags[i] & PackageInfo.REQUESTED_PERMISSION_GRANTED) == 0) {
+        missingPermissions.add(info.requestedPermissions[i]);
+      }
+    }
+    Log.d(TAG, "Missing permissions: " + missingPermissions);
+
+    return missingPermissions.toArray(new String[missingPermissions.size()]);
+  }
+
+  /**
+   * Get a string value either from the launching intent (when
+   * `useFromIntent` is true) or from shared preferences; the string resource
+   * `defaultId` supplies the fallback in both cases.
+   */
+  @Nullable
+  private String sharedPrefGetString(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    String defaultValue = getString(defaultId);
+    if (useFromIntent) {
+      String value = getIntent().getStringExtra(intentName);
+      if (value != null) {
+        return value;
+      }
+      return defaultValue;
+    } else {
+      String attributeName = getString(attributeId);
+      return sharedPref.getString(attributeName, defaultValue);
+    }
+  }
+
+  /**
+   * Get a boolean value either from the launching intent (when
+   * `useFromIntent` is true) or from shared preferences; the string resource
+   * `defaultId` supplies the fallback in both cases.
+   */
+  private boolean sharedPrefGetBoolean(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    boolean defaultValue = Boolean.parseBoolean(getString(defaultId));
+    if (useFromIntent) {
+      return getIntent().getBooleanExtra(intentName, defaultValue);
+    }
+    return sharedPref.getBoolean(getString(attributeId), defaultValue);
+  }
+
+ /**
+ * Get a value from the shared preference or from the intent, if it does not
+ * exist the default is used.
+ */
+ private int sharedPrefGetInteger(
+ int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+ String defaultString = getString(defaultId);
+ int defaultValue = Integer.parseInt(defaultString);
+ if (useFromIntent) {
+ return getIntent().getIntExtra(intentName, defaultValue);
+ } else {
+ String attributeName = getString(attributeId);
+ String value = sharedPref.getString(attributeName, defaultString);
+ try {
+ return Integer.parseInt(value);
+ } catch (NumberFormatException e) {
+ Log.e(TAG, "Wrong setting for: " + attributeName + ":" + value);
+ return defaultValue;
+ }
+ }
+ }
+
  /**
   * Collects every call setting — from the launching intent when
   * `useValuesFromIntent` is true, otherwise from shared preferences —
   * validates the room server URL, and starts CallActivity with all settings
   * packed as intent extras.
   */
  @SuppressWarnings("StringSplitter")
  private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
      boolean useValuesFromIntent, int runTimeMs) {
    ConnectActivity.commandLineRun = commandLineRun;

    // roomId is random for loopback.
    if (loopback) {
      roomId = Integer.toString((new Random()).nextInt(100000000));
    }

    String roomUrl = sharedPref.getString(
        keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));

    // Video call enabled flag.
    boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
        CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);

    // Use screencapture option.
    boolean useScreencapture = sharedPrefGetBoolean(R.string.pref_screencapture_key,
        CallActivity.EXTRA_SCREENCAPTURE, R.string.pref_screencapture_default, useValuesFromIntent);

    // Use Camera2 option.
    boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
        R.string.pref_camera2_default, useValuesFromIntent);

    // Get default codecs.
    String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
        CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
    String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
        CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);

    // Check HW codec flag.
    boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
        CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);

    // Check Capture to texture.
    boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
        CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
        useValuesFromIntent);

    // Check FlexFEC.
    boolean flexfecEnabled = sharedPrefGetBoolean(R.string.pref_flexfec_key,
        CallActivity.EXTRA_FLEXFEC_ENABLED, R.string.pref_flexfec_default, useValuesFromIntent);

    // Check Disable Audio Processing flag.
    boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
        CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
        useValuesFromIntent);

    boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
        CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);

    boolean saveInputAudioToFile =
        sharedPrefGetBoolean(R.string.pref_enable_save_input_audio_to_file_key,
            CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED,
            R.string.pref_enable_save_input_audio_to_file_default, useValuesFromIntent);

    // Check OpenSL ES enabled flag.
    boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
        CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);

    // Check Disable built-in AEC flag.
    boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
        CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
        useValuesFromIntent);

    // Check Disable built-in AGC flag.
    boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
        CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
        useValuesFromIntent);

    // Check Disable built-in NS flag.
    boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
        CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
        useValuesFromIntent);

    // Check Disable gain control
    // NOTE(review): the third argument below passes the *_key string resource
    // as the default-value id, whereas every sibling setting passes a
    // *_default resource. Boolean.parseBoolean of the key string yields
    // false, so the effective default is false — confirm that is intended
    // (and that a *_default resource exists) before changing.
    boolean disableWebRtcAGCAndHPF = sharedPrefGetBoolean(
        R.string.pref_disable_webrtc_agc_and_hpf_key, CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF,
        R.string.pref_disable_webrtc_agc_and_hpf_key, useValuesFromIntent);

    // Get video resolution from settings.
    int videoWidth = 0;
    int videoHeight = 0;
    if (useValuesFromIntent) {
      videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
      videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
    }
    if (videoWidth == 0 && videoHeight == 0) {
      // Fall back to the preference value, stored as e.g. "1280 x 720".
      String resolution =
          sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
      String[] dimensions = resolution.split("[ x]+");
      if (dimensions.length == 2) {
        try {
          videoWidth = Integer.parseInt(dimensions[0]);
          videoHeight = Integer.parseInt(dimensions[1]);
        } catch (NumberFormatException e) {
          videoWidth = 0;
          videoHeight = 0;
          Log.e(TAG, "Wrong video resolution setting: " + resolution);
        }
      }
    }

    // Get camera fps from settings.
    int cameraFps = 0;
    if (useValuesFromIntent) {
      cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
    }
    if (cameraFps == 0) {
      // Preference value is e.g. "30 fps"; only the first token is used.
      String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
      String[] fpsValues = fps.split("[ x]+");
      if (fpsValues.length == 2) {
        try {
          cameraFps = Integer.parseInt(fpsValues[0]);
        } catch (NumberFormatException e) {
          cameraFps = 0;
          Log.e(TAG, "Wrong camera fps setting: " + fps);
        }
      }
    }

    // Check capture quality slider flag.
    boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
        CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
        R.string.pref_capturequalityslider_default, useValuesFromIntent);

    // Get video and audio start bitrate.
    int videoStartBitrate = 0;
    if (useValuesFromIntent) {
      videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
    }
    if (videoStartBitrate == 0) {
      // 0 means "default": only read a value when the user picked a
      // non-default bitrate type in the settings.
      String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
      String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
      if (!bitrateType.equals(bitrateTypeDefault)) {
        String bitrateValue = sharedPref.getString(
            keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
        videoStartBitrate = Integer.parseInt(bitrateValue);
      }
    }

    int audioStartBitrate = 0;
    if (useValuesFromIntent) {
      audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
    }
    if (audioStartBitrate == 0) {
      String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
      String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
      if (!bitrateType.equals(bitrateTypeDefault)) {
        String bitrateValue = sharedPref.getString(
            keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
        audioStartBitrate = Integer.parseInt(bitrateValue);
      }
    }

    // Check statistics display option.
    boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
        CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);

    boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
        R.string.pref_tracing_default, useValuesFromIntent);

    // Check Enable RtcEventLog.
    boolean rtcEventLogEnabled = sharedPrefGetBoolean(R.string.pref_enable_rtceventlog_key,
        CallActivity.EXTRA_ENABLE_RTCEVENTLOG, R.string.pref_enable_rtceventlog_default,
        useValuesFromIntent);

    // Get datachannel options
    boolean dataChannelEnabled = sharedPrefGetBoolean(R.string.pref_enable_datachannel_key,
        CallActivity.EXTRA_DATA_CHANNEL_ENABLED, R.string.pref_enable_datachannel_default,
        useValuesFromIntent);
    boolean ordered = sharedPrefGetBoolean(R.string.pref_ordered_key, CallActivity.EXTRA_ORDERED,
        R.string.pref_ordered_default, useValuesFromIntent);
    boolean negotiated = sharedPrefGetBoolean(R.string.pref_negotiated_key,
        CallActivity.EXTRA_NEGOTIATED, R.string.pref_negotiated_default, useValuesFromIntent);
    int maxRetrMs = sharedPrefGetInteger(R.string.pref_max_retransmit_time_ms_key,
        CallActivity.EXTRA_MAX_RETRANSMITS_MS, R.string.pref_max_retransmit_time_ms_default,
        useValuesFromIntent);
    int maxRetr =
        sharedPrefGetInteger(R.string.pref_max_retransmits_key, CallActivity.EXTRA_MAX_RETRANSMITS,
            R.string.pref_max_retransmits_default, useValuesFromIntent);
    int id = sharedPrefGetInteger(R.string.pref_data_id_key, CallActivity.EXTRA_ID,
        R.string.pref_data_id_default, useValuesFromIntent);
    String protocol = sharedPrefGetString(R.string.pref_data_protocol_key,
        CallActivity.EXTRA_PROTOCOL, R.string.pref_data_protocol_default, useValuesFromIntent);

    // Start AppRTCMobile activity.
    Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
    if (validateUrl(roomUrl)) {
      Uri uri = Uri.parse(roomUrl);
      Intent intent = new Intent(this, CallActivity.class);
      intent.setData(uri);
      intent.putExtra(CallActivity.EXTRA_ROOMID, roomId);
      intent.putExtra(CallActivity.EXTRA_LOOPBACK, loopback);
      intent.putExtra(CallActivity.EXTRA_VIDEO_CALL, videoCallEnabled);
      intent.putExtra(CallActivity.EXTRA_SCREENCAPTURE, useScreencapture);
      intent.putExtra(CallActivity.EXTRA_CAMERA2, useCamera2);
      intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
      intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
      intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
      intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
      intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
      intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
      intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
      intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
      intent.putExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, flexfecEnabled);
      intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
      intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
      intent.putExtra(CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, saveInputAudioToFile);
      intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
      intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
      intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, disableBuiltInAGC);
      intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, disableBuiltInNS);
      intent.putExtra(CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, disableWebRtcAGCAndHPF);
      intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
      intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
      intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
      intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
      intent.putExtra(CallActivity.EXTRA_ENABLE_RTCEVENTLOG, rtcEventLogEnabled);
      intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
      intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
      intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, dataChannelEnabled);

      if (dataChannelEnabled) {
        intent.putExtra(CallActivity.EXTRA_ORDERED, ordered);
        intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS_MS, maxRetrMs);
        intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS, maxRetr);
        intent.putExtra(CallActivity.EXTRA_PROTOCOL, protocol);
        intent.putExtra(CallActivity.EXTRA_NEGOTIATED, negotiated);
        intent.putExtra(CallActivity.EXTRA_ID, id);
      }

      // File-based capture/recording extras are only forwarded when launched
      // via intent (e.g. from instrumentation tests).
      if (useValuesFromIntent) {
        if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
          String videoFileAsCamera =
              getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
          intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
        }

        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
          String saveRemoteVideoToFile =
              getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
        }

        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
          int videoOutWidth =
              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
        }

        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
          int videoOutHeight =
              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
        }
      }

      startActivityForResult(intent, CONNECTION_REQUEST);
    }
  }
+
+ private boolean validateUrl(String url) {
+ if (URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url)) {
+ return true;
+ }
+
+ new AlertDialog.Builder(this)
+ .setTitle(getText(R.string.invalid_url_title))
+ .setMessage(getString(R.string.invalid_url_text, url))
+ .setCancelable(false)
+ .setNeutralButton(R.string.ok,
+ new DialogInterface.OnClickListener() {
+ @Override
+ public void onClick(DialogInterface dialog, int id) {
+ dialog.cancel();
+ }
+ })
+ .create()
+ .show();
+ return false;
+ }
+
  // Starts a call when a row in the favorites list is tapped; the tapped
  // row's text is used as the room id.
  private final AdapterView.OnItemClickListener roomListClickListener =
      new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
          String roomId = ((TextView) view).getText().toString();
          connectToRoom(roomId, false, false, false, 0);
        }
      };
+
  // Adds the room name currently in the edit box to the favorites list,
  // skipping empty input and duplicates.
  private final OnClickListener addFavoriteListener = new OnClickListener() {
    @Override
    public void onClick(View view) {
      String newRoom = roomEditText.getText().toString();
      if (newRoom.length() > 0 && !roomList.contains(newRoom)) {
        adapter.add(newRoom);
        adapter.notifyDataSetChanged();
      }
    }
  };
+
  // Connects to the room named in the edit box with default call options.
  private final OnClickListener connectListener = new OnClickListener() {
    @Override
    public void onClick(View view) {
      connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
    }
  };
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
new file mode 100644
index 0000000000..1c64621864
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
@@ -0,0 +1,521 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.BatteryManager;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.Scanner;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Simple CPU monitor. The caller creates a CpuMonitor object which can then
+ * be used via sampleCpuUtilization() to collect the percentual use of the
+ * cumulative CPU capacity for all CPUs running at their nominal frequency. 3
+ * values are generated: (1) getCpuCurrent() returns the use since the last
+ * sampleCpuUtilization(), (2) getCpuAvg3() returns the use since 3 prior
+ * calls, and (3) getCpuAvgAll() returns the use over all SAMPLE_SAVE_NUMBER
+ * calls.
+ *
+ * <p>CPUs in Android are often "offline", and while this of course means 0 Hz
+ * as current frequency, in this state we cannot even get their nominal
+ * frequency. We therefore tread carefully, and allow any CPU to be missing.
+ * Missing CPUs are assumed to have the same nominal frequency as any close
+ * lower-numbered CPU, but as soon as it is online, we'll get their proper
+ * frequency and remember it. (Since CPU 0 in practice always seem to be
+ * online, this unidirectional frequency inheritance should be no problem in
+ * practice.)
+ *
+ * <p>Caveats:
+ * o No provision made for zany "turbo" mode, common in the x86 world.
+ * o No provision made for ARM big.LITTLE; if CPU n can switch behind our
+ * back, we might get incorrect estimates.
+ * o This is not thread-safe. To call asynchronously, create different
+ * CpuMonitor objects.
+ *
+ * <p>If we can gather enough info to generate a sensible result,
+ * sampleCpuUtilization returns true. It is designed to never throw an
+ * exception.
+ *
+ * <p>sampleCpuUtilization should not be called too often in its present form,
+ * since then deltas would be small and the percent values would fluctuate and
+ * be unreadable. If it is desirable to call it more often than say once per
+ * second, one would need to increase SAMPLE_SAVE_NUMBER and probably use
+ * Queue<Integer> to avoid copying overhead.
+ *
+ * <p>Known problems:
+ * 1. Nexus 7 devices running Kitkat have a kernel which often output an
+ * incorrect 'idle' field in /proc/stat. The value is close to twice the
+ * correct value, and then returns back to the correct reading. Both when
+ * jumping up and back down we might create faulty CPU load readings.
+ */
+class CpuMonitor {
+ private static final String TAG = "CpuMonitor";
+ private static final int MOVING_AVERAGE_SAMPLES = 5;
+
+ private static final int CPU_STAT_SAMPLE_PERIOD_MS = 2000;
+ private static final int CPU_STAT_LOG_PERIOD_MS = 6000;
+
+ private final Context appContext;
+ // User CPU usage at current frequency.
+ private final MovingAverage userCpuUsage;
+ // System CPU usage at current frequency.
+ private final MovingAverage systemCpuUsage;
+ // Total CPU usage relative to maximum frequency.
+ private final MovingAverage totalCpuUsage;
+ // CPU frequency in percentage from maximum.
+ private final MovingAverage frequencyScale;
+
+ @Nullable
+ private ScheduledExecutorService executor;
+ private long lastStatLogTimeMs;
+ private long[] cpuFreqMax;
+ private int cpusPresent;
+ private int actualCpusPresent;
+ private boolean initialized;
+ private boolean cpuOveruse;
+ private String[] maxPath;
+ private String[] curPath;
+ private double[] curFreqScales;
+ @Nullable
+ private ProcStat lastProcStat;
+
  // Immutable snapshot of the cumulative CPU time counters read from
  // /proc/stat. Consecutive snapshots are subtracted to compute usage deltas.
  private static class ProcStat {
    final long userTime;
    final long systemTime;
    final long idleTime;

    ProcStat(long userTime, long systemTime, long idleTime) {
      this.userTime = userTime;
      this.systemTime = systemTime;
      this.idleTime = idleTime;
    }
  }
+
+ private static class MovingAverage {
+ private final int size;
+ private double sum;
+ private double currentValue;
+ private double[] circBuffer;
+ private int circBufferIndex;
+
+ public MovingAverage(int size) {
+ if (size <= 0) {
+ throw new AssertionError("Size value in MovingAverage ctor should be positive.");
+ }
+ this.size = size;
+ circBuffer = new double[size];
+ }
+
+ public void reset() {
+ Arrays.fill(circBuffer, 0);
+ circBufferIndex = 0;
+ sum = 0;
+ currentValue = 0;
+ }
+
+ public void addValue(double value) {
+ sum -= circBuffer[circBufferIndex];
+ circBuffer[circBufferIndex++] = value;
+ currentValue = value;
+ sum += value;
+ if (circBufferIndex >= size) {
+ circBufferIndex = 0;
+ }
+ }
+
+ public double getCurrent() {
+ return currentValue;
+ }
+
+ public double getAverage() {
+ return sum / (double) size;
+ }
+ }
+
  /**
   * Reports whether the monitor can run on this device: only pre-N builds.
   * (Presumably because newer releases restrict access to the /proc and
   * per-CPU sysfs files this class reads — confirm if extending support.)
   */
  public static boolean isSupported() {
    return Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
  }
+
  /**
   * Creates the monitor and immediately starts periodic CPU sampling on a
   * background executor.
   *
   * @throws RuntimeException if isSupported() is false for this device.
   */
  public CpuMonitor(Context context) {
    if (!isSupported()) {
      throw new RuntimeException("CpuMonitor is not supported on this Android version.");
    }

    Log.d(TAG, "CpuMonitor ctor.");
    appContext = context.getApplicationContext();
    userCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    systemCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    totalCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    frequencyScale = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    lastStatLogTimeMs = SystemClock.elapsedRealtime();

    scheduleCpuUtilizationTask();
  }
+
+ public void pause() {
+ if (executor != null) {
+ Log.d(TAG, "pause");
+ executor.shutdownNow();
+ executor = null;
+ }
+ }
+
  /** Clears accumulated statistics and restarts periodic sampling. */
  public void resume() {
    Log.d(TAG, "resume");
    resetStat();
    scheduleCpuUtilizationTask();
  }
+
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  /** Clears statistics and the overuse flag; no-op while sampling is paused. */
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized void reset() {
    if (executor != null) {
      Log.d(TAG, "reset");
      resetStat();
      cpuOveruse = false;
    }
  }
+
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  /** Latest user+system CPU usage sample, as a percentage. */
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized int getCpuUsageCurrent() {
    return doubleToPercent(userCpuUsage.getCurrent() + systemCpuUsage.getCurrent());
  }
+
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  /** Moving-average user+system CPU usage, as a percentage. */
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized int getCpuUsageAverage() {
    return doubleToPercent(userCpuUsage.getAverage() + systemCpuUsage.getAverage());
  }
+
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  /** Moving-average CPU frequency relative to maximum, as a percentage. */
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized int getFrequencyScaleAverage() {
    return doubleToPercent(frequencyScale.getAverage());
  }
+
  /**
   * (Re)creates the single-threaded executor and schedules the sampling task
   * to run every CPU_STAT_SAMPLE_PERIOD_MS, starting immediately.
   */
  private void scheduleCpuUtilizationTask() {
    if (executor != null) {
      executor.shutdownNow();
      executor = null;
    }

    executor = Executors.newSingleThreadScheduledExecutor();
    @SuppressWarnings("unused") // Prevent downstream linter warnings.
    Future<?> possiblyIgnoredError = executor.scheduleAtFixedRate(new Runnable() {
      @Override
      public void run() {
        cpuUtilizationTask();
      }
    }, 0, CPU_STAT_SAMPLE_PERIOD_MS, TimeUnit.MILLISECONDS);
  }
+
+ private void cpuUtilizationTask() {
+ boolean cpuMonitorAvailable = sampleCpuUtilization();
+ if (cpuMonitorAvailable
+ && SystemClock.elapsedRealtime() - lastStatLogTimeMs >= CPU_STAT_LOG_PERIOD_MS) {
+ lastStatLogTimeMs = SystemClock.elapsedRealtime();
+ String statString = getStatString();
+ Log.d(TAG, statString);
+ }
+ }
+
+ private void init() {
+ try (FileInputStream fin = new FileInputStream("/sys/devices/system/cpu/present");
+ InputStreamReader streamReader = new InputStreamReader(fin, Charset.forName("UTF-8"));
+ BufferedReader reader = new BufferedReader(streamReader);
+ Scanner scanner = new Scanner(reader).useDelimiter("[-\n]");) {
+ scanner.nextInt(); // Skip leading number 0.
+ cpusPresent = 1 + scanner.nextInt();
+ scanner.close();
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Cannot do CPU stats since /sys/devices/system/cpu/present is missing");
+ } catch (IOException e) {
+ Log.e(TAG, "Error closing file");
+ } catch (Exception e) {
+ Log.e(TAG, "Cannot do CPU stats due to /sys/devices/system/cpu/present parsing problem");
+ }
+
+ cpuFreqMax = new long[cpusPresent];
+ maxPath = new String[cpusPresent];
+ curPath = new String[cpusPresent];
+ curFreqScales = new double[cpusPresent];
+ for (int i = 0; i < cpusPresent; i++) {
+ cpuFreqMax[i] = 0; // Frequency "not yet determined".
+ curFreqScales[i] = 0;
+ maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
+ curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
+ }
+
+ lastProcStat = new ProcStat(0, 0, 0);
+ resetStat();
+
+ initialized = true;
+ }
+
  /** Empties all moving averages and restarts the log-throttling timer. */
  private synchronized void resetStat() {
    userCpuUsage.reset();
    systemCpuUsage.reset();
    totalCpuUsage.reset();
    frequencyScale.reset();
    lastStatLogTimeMs = SystemClock.elapsedRealtime();
  }
+
+ private int getBatteryLevel() {
+ // Use sticky broadcast with null receiver to read battery level once only.
+ Intent intent = appContext.registerReceiver(
+ null /* receiver */, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));
+
+ int batteryLevel = 0;
+ int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
+ if (batteryScale > 0) {
+ batteryLevel =
+ (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
+ }
+ return batteryLevel;
+ }
+
  /**
   * Re-measure CPU use. Call this method at an interval of around 1/s.
   * On success, returns true and pushes one new sample into each of the
   * userCpuUsage, systemCpuUsage, totalCpuUsage and frequencyScale moving
   * averages. Returns false when neither frequencies nor /proc/stat could be
   * read, or when no time has elapsed since the previous sample.
   */
  private synchronized boolean sampleCpuUtilization() {
    long lastSeenMaxFreq = 0;
    long cpuFreqCurSum = 0;
    long cpuFreqMaxSum = 0;

    if (!initialized) {
      init();
    }
    if (cpusPresent == 0) {
      return false;
    }

    actualCpusPresent = 0;
    for (int i = 0; i < cpusPresent; i++) {
      /*
       * For each CPU, attempt to first read its max frequency, then its
       * current frequency. Once the max frequency for a CPU is found,
       * save it in cpuFreqMax[].
       */

      curFreqScales[i] = 0;
      if (cpuFreqMax[i] == 0) {
        // We have never found this CPU's max frequency. Attempt to read it.
        long cpufreqMax = readFreqFromFile(maxPath[i]);
        if (cpufreqMax > 0) {
          Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
          lastSeenMaxFreq = cpufreqMax;
          cpuFreqMax[i] = cpufreqMax;
          maxPath[i] = null; // Kill path to free its memory.
        }
      } else {
        lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
      }

      long cpuFreqCur = readFreqFromFile(curPath[i]);
      if (cpuFreqCur == 0 && lastSeenMaxFreq == 0) {
        // No current frequency information for this CPU core - ignore it.
        continue;
      }
      if (cpuFreqCur > 0) {
        actualCpusPresent++;
      }
      cpuFreqCurSum += cpuFreqCur;

      /* Here, lastSeenMaxFreq might come from
       * 1. cpuFreqMax[i], or
       * 2. a previous loop iteration (a missing CPU inherits the last
       *    lower-numbered CPU's max frequency), or
       * 3. a newly read value.
       */
      cpuFreqMaxSum += lastSeenMaxFreq;
      if (lastSeenMaxFreq > 0) {
        curFreqScales[i] = (double) cpuFreqCur / lastSeenMaxFreq;
      }
    }

    if (cpuFreqCurSum == 0 || cpuFreqMaxSum == 0) {
      Log.e(TAG, "Could not read max or current frequency for any CPU");
      return false;
    }

    /*
     * Since the cycle counts are for the period between the last invocation
     * and this present one, we average the percentual CPU frequencies between
     * now and the beginning of the measurement period. This is significantly
     * incorrect only if the frequencies have peeked or dropped in between the
     * invocations.
     */
    double currentFrequencyScale = cpuFreqCurSum / (double) cpuFreqMaxSum;
    if (frequencyScale.getCurrent() > 0) {
      currentFrequencyScale = (frequencyScale.getCurrent() + currentFrequencyScale) * 0.5;
    }

    ProcStat procStat = readProcStat();
    if (procStat == null) {
      return false;
    }

    // Deltas against the previous sample, in jiffies.
    long diffUserTime = procStat.userTime - lastProcStat.userTime;
    long diffSystemTime = procStat.systemTime - lastProcStat.systemTime;
    long diffIdleTime = procStat.idleTime - lastProcStat.idleTime;
    long allTime = diffUserTime + diffSystemTime + diffIdleTime;

    if (currentFrequencyScale == 0 || allTime == 0) {
      return false;
    }

    // Update statistics.
    frequencyScale.addValue(currentFrequencyScale);

    double currentUserCpuUsage = diffUserTime / (double) allTime;
    userCpuUsage.addValue(currentUserCpuUsage);

    double currentSystemCpuUsage = diffSystemTime / (double) allTime;
    systemCpuUsage.addValue(currentSystemCpuUsage);

    // Total usage is scaled by the current frequency so it reflects use of
    // the nominal (maximum-frequency) CPU capacity.
    double currentTotalCpuUsage =
        (currentUserCpuUsage + currentSystemCpuUsage) * currentFrequencyScale;
    totalCpuUsage.addValue(currentTotalCpuUsage);

    // Save new measurements for next round's deltas.
    lastProcStat = procStat;

    return true;
  }
+
  /** Converts a fraction (e.g. 0.57) to a rounded percentage (57). */
  private int doubleToPercent(double d) {
    return (int) (d * 100 + 0.5);
  }
+
  /**
   * Formats the current/average usage, per-core frequency scales and battery
   * level into a single line for logging. Each pair is "current/average".
   */
  private synchronized String getStatString() {
    StringBuilder stat = new StringBuilder();
    stat.append("CPU User: ")
        .append(doubleToPercent(userCpuUsage.getCurrent()))
        .append("/")
        .append(doubleToPercent(userCpuUsage.getAverage()))
        .append(". System: ")
        .append(doubleToPercent(systemCpuUsage.getCurrent()))
        .append("/")
        .append(doubleToPercent(systemCpuUsage.getAverage()))
        .append(". Freq: ")
        .append(doubleToPercent(frequencyScale.getCurrent()))
        .append("/")
        .append(doubleToPercent(frequencyScale.getAverage()))
        .append(". Total usage: ")
        .append(doubleToPercent(totalCpuUsage.getCurrent()))
        .append("/")
        .append(doubleToPercent(totalCpuUsage.getAverage()))
        .append(". Cores: ")
        .append(actualCpusPresent);
    stat.append("( ");
    for (int i = 0; i < cpusPresent; i++) {
      stat.append(doubleToPercent(curFreqScales[i])).append(" ");
    }
    stat.append("). Battery: ").append(getBatteryLevel());
    if (cpuOveruse) {
      stat.append(". Overuse.");
    }
    return stat.toString();
  }
+
+ /**
+ * Read a single integer value from the named file. Return the read value
+ * or if an error occurs return 0.
+ */
+ private long readFreqFromFile(String fileName) {
+ long number = 0;
+ try (FileInputStream stream = new FileInputStream(fileName);
+ InputStreamReader streamReader = new InputStreamReader(stream, Charset.forName("UTF-8"));
+ BufferedReader reader = new BufferedReader(streamReader)) {
+ String line = reader.readLine();
+ number = parseLong(line);
+ } catch (FileNotFoundException e) {
+ // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
+ // is not present. This is not an error.
+ } catch (IOException e) {
+ // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
+ // is empty. This is not an error.
+ }
+ return number;
+ }
+
+ private static long parseLong(String value) {
+ long number = 0;
+ try {
+ number = Long.parseLong(value);
+ } catch (NumberFormatException e) {
+ Log.e(TAG, "parseLong error.", e);
+ }
+ return number;
+ }
+
  /*
   * Read the current utilization of all CPUs using the cumulative first line
   * of /proc/stat. Returns null if the file cannot be read or parsed. Note
   * that when enough fields are present, iowait is folded into user time and
   * irq/softirq into system time.
   */
  @SuppressWarnings("StringSplitter")
  private @Nullable ProcStat readProcStat() {
    long userTime = 0;
    long systemTime = 0;
    long idleTime = 0;
    try (FileInputStream stream = new FileInputStream("/proc/stat");
        InputStreamReader streamReader = new InputStreamReader(stream, Charset.forName("UTF-8"));
        BufferedReader reader = new BufferedReader(streamReader)) {
      // line should contain something like this:
      // cpu  5093818 271838 3512830 165934119 101374 447076 272086 0 0 0
      //       user    nice  system     idle   iowait  irq   softirq
      String line = reader.readLine();
      String[] lines = line.split("\\s+");
      int length = lines.length;
      if (length >= 5) {
        userTime = parseLong(lines[1]); // user
        userTime += parseLong(lines[2]); // nice
        systemTime = parseLong(lines[3]); // system
        idleTime = parseLong(lines[4]); // idle
      }
      if (length >= 8) {
        userTime += parseLong(lines[5]); // iowait
        systemTime += parseLong(lines[6]); // irq
        systemTime += parseLong(lines[7]); // softirq
      }
    } catch (FileNotFoundException e) {
      Log.e(TAG, "Cannot open /proc/stat for reading", e);
      return null;
    } catch (Exception e) {
      Log.e(TAG, "Problems parsing /proc/stat", e);
      return null;
    }
    return new ProcStat(userTime, systemTime, idleTime);
  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
new file mode 100644
index 0000000000..1b113e1398
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.SessionDescription;
+
+/**
+ * Implementation of AppRTCClient that uses direct TCP connection as the signaling channel.
+ * This eliminates the need for an external server. This class does not support loopback
+ * connections.
+ */
+public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChannelEvents {
+ private static final String TAG = "DirectRTCClient";
+ private static final int DEFAULT_PORT = 8888;
+
+ // Regex pattern used for checking if room id looks like an IP.
+ static final Pattern IP_PATTERN = Pattern.compile("("
+ // IPv4
+ + "((\\d+\\.){3}\\d+)|"
+ // IPv6
+ + "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
+ + "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
+ + "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
+ // IPv6 without []
+ + "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
+ + "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
+ // Literals
+ + "localhost"
+ + ")"
+ // Optional port number
+ + "(:(\\d+))?");
+
+ private final ExecutorService executor;
+ private final SignalingEvents events;
+ @Nullable
+ private TCPChannelClient tcpClient;
+ private RoomConnectionParameters connectionParameters;
+
+ private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+ // All alterations of the room state should be done from inside the looper thread.
+ private ConnectionState roomState;
+
+ public DirectRTCClient(SignalingEvents events) {
+ this.events = events;
+
+ executor = Executors.newSingleThreadExecutor();
+ roomState = ConnectionState.NEW;
+ }
+
+ /**
+ * Connects to the room, roomId in connectionsParameters is required. roomId must be a valid
+ * IP address matching IP_PATTERN.
+ */
+ @Override
+ public void connectToRoom(RoomConnectionParameters connectionParameters) {
+ this.connectionParameters = connectionParameters;
+
+ if (connectionParameters.loopback) {
+ reportError("Loopback connections aren't supported by DirectRTCClient.");
+ }
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ connectToRoomInternal();
+ }
+ });
+ }
+
+ @Override
+ public void disconnectFromRoom() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ disconnectFromRoomInternal();
+ }
+ });
+ }
+
+ /**
+ * Connects to the room.
+ *
+ * Runs on the looper thread.
+ */
+ private void connectToRoomInternal() {
+ this.roomState = ConnectionState.NEW;
+
+ String endpoint = connectionParameters.roomId;
+
+ Matcher matcher = IP_PATTERN.matcher(endpoint);
+ if (!matcher.matches()) {
+ reportError("roomId must match IP_PATTERN for DirectRTCClient.");
+ return;
+ }
+
+ String ip = matcher.group(1);
+ String portStr = matcher.group(matcher.groupCount());
+ int port;
+
+ if (portStr != null) {
+ try {
+ port = Integer.parseInt(portStr);
+ } catch (NumberFormatException e) {
+ reportError("Invalid port number: " + portStr);
+ return;
+ }
+ } else {
+ port = DEFAULT_PORT;
+ }
+
+ tcpClient = new TCPChannelClient(executor, this, ip, port);
+ }
+
+ /**
+ * Disconnects from the room.
+ *
+ * Runs on the looper thread.
+ */
+ private void disconnectFromRoomInternal() {
+ roomState = ConnectionState.CLOSED;
+
+ if (tcpClient != null) {
+ tcpClient.disconnect();
+ tcpClient = null;
+ }
+ executor.shutdown();
+ }
+
+ @Override
+ public void sendOfferSdp(final SessionDescription sdp) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending offer SDP in non connected state.");
+ return;
+ }
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "offer");
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ @Override
+ public void sendAnswerSdp(final SessionDescription sdp) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "answer");
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ @Override
+ public void sendLocalIceCandidate(final IceCandidate candidate) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "candidate");
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate in non connected state.");
+ return;
+ }
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ /** Send removed Ice candidates to the other participant. */
+ @Override
+ public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "remove-candidates");
+ JSONArray jsonArray = new JSONArray();
+ for (final IceCandidate candidate : candidates) {
+ jsonArray.put(toJsonCandidate(candidate));
+ }
+ jsonPut(json, "candidates", jsonArray);
+
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate removals in non connected state.");
+ return;
+ }
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ // -------------------------------------------------------------------
+ // TCPChannelClient event handlers
+
+ /**
+ * If the client is the server side, this will trigger onConnectedToRoom.
+ */
+ @Override
+ public void onTCPConnected(boolean isServer) {
+ if (isServer) {
+ roomState = ConnectionState.CONNECTED;
+
+ SignalingParameters parameters = new SignalingParameters(
+ // Ice servers are not needed for direct connections.
+ new ArrayList<>(),
+ isServer, // Server side acts as the initiator on direct connections.
+ null, // clientId
+ null, // wssUrl
+ null, // wwsPostUrl
+ null, // offerSdp
+ null // iceCandidates
+ );
+ events.onConnectedToRoom(parameters);
+ }
+ }
+
+ @Override
+ public void onTCPMessage(String msg) {
+ try {
+ JSONObject json = new JSONObject(msg);
+ String type = json.optString("type");
+ if (type.equals("candidate")) {
+ events.onRemoteIceCandidate(toJavaCandidate(json));
+ } else if (type.equals("remove-candidates")) {
+ JSONArray candidateArray = json.getJSONArray("candidates");
+ IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
+ for (int i = 0; i < candidateArray.length(); ++i) {
+ candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
+ }
+ events.onRemoteIceCandidatesRemoved(candidates);
+ } else if (type.equals("answer")) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+ events.onRemoteDescription(sdp);
+ } else if (type.equals("offer")) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+
+ SignalingParameters parameters = new SignalingParameters(
+ // Ice servers are not needed for direct connections.
+ new ArrayList<>(),
+ false, // This code will only be run on the client side. So, we are not the initiator.
+ null, // clientId
+ null, // wssUrl
+ null, // wssPostUrl
+ sdp, // offerSdp
+ null // iceCandidates
+ );
+ roomState = ConnectionState.CONNECTED;
+ events.onConnectedToRoom(parameters);
+ } else {
+ reportError("Unexpected TCP message: " + msg);
+ }
+ } catch (JSONException e) {
+ reportError("TCP message JSON parsing error: " + e.toString());
+ }
+ }
+
+ @Override
+ public void onTCPError(String description) {
+ reportError("TCP connection error: " + description);
+ }
+
+ @Override
+ public void onTCPClose() {
+ events.onChannelClose();
+ }
+
+ // --------------------------------------------------------------------
+ // Helper functions.
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, errorMessage);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.ERROR) {
+ roomState = ConnectionState.ERROR;
+ events.onChannelError(errorMessage);
+ }
+ }
+ });
+ }
+
+ private void sendMessage(final String message) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ tcpClient.send(message);
+ }
+ });
+ }
+
+ // Put a `key`->`value` mapping in `json`.
+ private static void jsonPut(JSONObject json, String key, Object value) {
+ try {
+ json.put(key, value);
+ } catch (JSONException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ // Converts a Java candidate to a JSONObject.
+ private static JSONObject toJsonCandidate(final IceCandidate candidate) {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+ return json;
+ }
+
+ // Converts a JSON candidate to a Java object.
+ private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
+ return new IceCandidate(
+ json.getString("id"), json.getInt("label"), json.getString("candidate"));
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
new file mode 100644
index 0000000000..94ca05549a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.TextView;
+import org.webrtc.RTCStats;
+import org.webrtc.RTCStatsReport;
+
+/**
+ * Fragment for HUD statistics display.
+ */
+public class HudFragment extends Fragment {
+ private TextView statView;
+ private ImageButton toggleDebugButton;
+ private boolean displayHud;
+ private volatile boolean isRunning;
+ private CpuMonitor cpuMonitor;
+
+ @Override
+ public View onCreateView(
+ LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+ View controlView = inflater.inflate(R.layout.fragment_hud, container, false);
+
+ // Create UI controls.
+ statView = controlView.findViewById(R.id.hud_stat_call);
+ toggleDebugButton = controlView.findViewById(R.id.button_toggle_debug);
+
+ toggleDebugButton.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ if (displayHud) {
+ statView.setVisibility(
+ statView.getVisibility() == View.VISIBLE ? View.INVISIBLE : View.VISIBLE);
+ }
+ }
+ });
+
+ return controlView;
+ }
+
+ @Override
+ public void onStart() {
+ super.onStart();
+
+ Bundle args = getArguments();
+ if (args != null) {
+ displayHud = args.getBoolean(CallActivity.EXTRA_DISPLAY_HUD, false);
+ }
+ int visibility = displayHud ? View.VISIBLE : View.INVISIBLE;
+ statView.setVisibility(View.INVISIBLE);
+ toggleDebugButton.setVisibility(visibility);
+ isRunning = true;
+ }
+
+ @Override
+ public void onStop() {
+ isRunning = false;
+ super.onStop();
+ }
+
+ public void setCpuMonitor(CpuMonitor cpuMonitor) {
+ this.cpuMonitor = cpuMonitor;
+ }
+
+ public void updateEncoderStatistics(final RTCStatsReport report) {
+ if (!isRunning || !displayHud) {
+ return;
+ }
+
+ StringBuilder sb = new StringBuilder();
+
+ if (cpuMonitor != null) {
+ sb.append("CPU%: ")
+ .append(cpuMonitor.getCpuUsageCurrent())
+ .append("/")
+ .append(cpuMonitor.getCpuUsageAverage())
+ .append(". Freq: ")
+ .append(cpuMonitor.getFrequencyScaleAverage())
+ .append("\n");
+ }
+
+ for (RTCStats stat : report.getStatsMap().values()) {
+ sb.append(stat.toString()).append("\n");
+ }
+
+ statView.setText(sb.toString());
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
new file mode 100644
index 0000000000..398b0c3b5e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -0,0 +1,1400 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.Context;
+import android.os.Environment;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.RecordedAudioToFileController;
+import org.webrtc.AddIceObserver;
+import org.webrtc.AudioSource;
+import org.webrtc.AudioTrack;
+import org.webrtc.CameraVideoCapturer;
+import org.webrtc.CandidatePairChangeEvent;
+import org.webrtc.DataChannel;
+import org.webrtc.DefaultVideoDecoderFactory;
+import org.webrtc.DefaultVideoEncoderFactory;
+import org.webrtc.EglBase;
+import org.webrtc.IceCandidate;
+import org.webrtc.IceCandidateErrorEvent;
+import org.webrtc.Logging;
+import org.webrtc.MediaConstraints;
+import org.webrtc.MediaStream;
+import org.webrtc.MediaStreamTrack;
+import org.webrtc.PeerConnection;
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnection.PeerConnectionState;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RTCStatsCollectorCallback;
+import org.webrtc.RTCStatsReport;
+import org.webrtc.RtpParameters;
+import org.webrtc.RtpReceiver;
+import org.webrtc.RtpSender;
+import org.webrtc.RtpTransceiver;
+import org.webrtc.SdpObserver;
+import org.webrtc.SessionDescription;
+import org.webrtc.SoftwareVideoDecoderFactory;
+import org.webrtc.SoftwareVideoEncoderFactory;
+import org.webrtc.SurfaceTextureHelper;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoDecoderFactory;
+import org.webrtc.VideoEncoderFactory;
+import org.webrtc.VideoSink;
+import org.webrtc.VideoSource;
+import org.webrtc.VideoTrack;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
+
+/**
+ * Peer connection client implementation.
+ *
+ * <p>All public methods are routed to local looper thread.
+ * All PeerConnectionEvents callbacks are invoked from the same looper thread.
+ * This class is a singleton.
+ */
+public class PeerConnectionClient {
+ public static final String VIDEO_TRACK_ID = "ARDAMSv0";
+ public static final String AUDIO_TRACK_ID = "ARDAMSa0";
+ public static final String VIDEO_TRACK_TYPE = "video";
+ private static final String TAG = "PCRTCClient";
+ private static final String VIDEO_CODEC_VP8 = "VP8";
+ private static final String VIDEO_CODEC_VP9 = "VP9";
+ private static final String VIDEO_CODEC_H264 = "H264";
+ private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
+ private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
+ private static final String VIDEO_CODEC_AV1 = "AV1";
+ private static final String AUDIO_CODEC_OPUS = "opus";
+ private static final String AUDIO_CODEC_ISAC = "ISAC";
+ private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
+ private static final String VIDEO_FLEXFEC_FIELDTRIAL =
+ "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
+ private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
+ "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
+ private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
+ private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
+ private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
+ private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
+ private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
+ private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
+ private static final int HD_VIDEO_WIDTH = 1280;
+ private static final int HD_VIDEO_HEIGHT = 720;
+ private static final int BPS_IN_KBPS = 1000;
+ private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log";
+
+ // Executor thread is started once in private ctor and is used for all
+ // peer connection API calls to ensure new peer connection factory is
+ // created on the same thread as previously destroyed factory.
+ private static final ExecutorService executor = Executors.newSingleThreadExecutor();
+
+ private final PCObserver pcObserver = new PCObserver();
+ private final SDPObserver sdpObserver = new SDPObserver();
+ private final Timer statsTimer = new Timer();
+ private final EglBase rootEglBase;
+ private final Context appContext;
+ private final PeerConnectionParameters peerConnectionParameters;
+ private final PeerConnectionEvents events;
+
+ @Nullable
+ private PeerConnectionFactory factory;
+ @Nullable
+ private PeerConnection peerConnection;
+ @Nullable
+ private AudioSource audioSource;
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private VideoSource videoSource;
+ private boolean preferIsac;
+ private boolean videoCapturerStopped;
+ private boolean isError;
+ @Nullable
+ private VideoSink localRender;
+ @Nullable private List<VideoSink> remoteSinks;
+ private SignalingParameters signalingParameters;
+ private int videoWidth;
+ private int videoHeight;
+ private int videoFps;
+ private MediaConstraints audioConstraints;
+ private MediaConstraints sdpMediaConstraints;
+ // Queued remote ICE candidates are consumed only after both local and
+ // remote descriptions are set. Similarly local ICE candidates are sent to
+ // remote peer after both local and remote description are set.
+ @Nullable
+ private List<IceCandidate> queuedRemoteCandidates;
+ private boolean isInitiator;
+ @Nullable private SessionDescription localDescription; // either offer or answer description
+ @Nullable
+ private VideoCapturer videoCapturer;
+ // enableVideo is set to true if video should be rendered and sent.
+ private boolean renderVideo = true;
+ @Nullable
+ private VideoTrack localVideoTrack;
+ @Nullable
+ private VideoTrack remoteVideoTrack;
+ @Nullable
+ private RtpSender localVideoSender;
+ // enableAudio is set to true if audio should be sent.
+ private boolean enableAudio = true;
+ @Nullable
+ private AudioTrack localAudioTrack;
+ @Nullable
+ private DataChannel dataChannel;
+ private final boolean dataChannelEnabled;
+ // Enable RtcEventLog.
+ @Nullable
+ private RtcEventLog rtcEventLog;
+ // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
+ // recorded audio samples to an output file.
+ @Nullable private RecordedAudioToFileController saveRecordedAudioToFile;
+
+ /**
+ * Peer connection parameters.
+ */
+ public static class DataChannelParameters {
+ public final boolean ordered;
+ public final int maxRetransmitTimeMs;
+ public final int maxRetransmits;
+ public final String protocol;
+ public final boolean negotiated;
+ public final int id;
+
+ public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+ String protocol, boolean negotiated, int id) {
+ this.ordered = ordered;
+ this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+ this.maxRetransmits = maxRetransmits;
+ this.protocol = protocol;
+ this.negotiated = negotiated;
+ this.id = id;
+ }
+ }
+
+ /**
+ * Peer connection parameters.
+ */
+ public static class PeerConnectionParameters {
+ public final boolean videoCallEnabled;
+ public final boolean loopback;
+ public final boolean tracing;
+ public final int videoWidth;
+ public final int videoHeight;
+ public final int videoFps;
+ public final int videoMaxBitrate;
+ public final String videoCodec;
+ public final boolean videoCodecHwAcceleration;
+ public final boolean videoFlexfecEnabled;
+ public final int audioStartBitrate;
+ public final String audioCodec;
+ public final boolean noAudioProcessing;
+ public final boolean aecDump;
+ public final boolean saveInputAudioToFile;
+ public final boolean useOpenSLES;
+ public final boolean disableBuiltInAEC;
+ public final boolean disableBuiltInAGC;
+ public final boolean disableBuiltInNS;
+ public final boolean disableWebRtcAGCAndHPF;
+ public final boolean enableRtcEventLog;
+ private final DataChannelParameters dataChannelParameters;
+
+ public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+ int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
+ boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
+ String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile,
+ boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
+ boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog,
+ DataChannelParameters dataChannelParameters) {
+ this.videoCallEnabled = videoCallEnabled;
+ this.loopback = loopback;
+ this.tracing = tracing;
+ this.videoWidth = videoWidth;
+ this.videoHeight = videoHeight;
+ this.videoFps = videoFps;
+ this.videoMaxBitrate = videoMaxBitrate;
+ this.videoCodec = videoCodec;
+ this.videoFlexfecEnabled = videoFlexfecEnabled;
+ this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+ this.audioStartBitrate = audioStartBitrate;
+ this.audioCodec = audioCodec;
+ this.noAudioProcessing = noAudioProcessing;
+ this.aecDump = aecDump;
+ this.saveInputAudioToFile = saveInputAudioToFile;
+ this.useOpenSLES = useOpenSLES;
+ this.disableBuiltInAEC = disableBuiltInAEC;
+ this.disableBuiltInAGC = disableBuiltInAGC;
+ this.disableBuiltInNS = disableBuiltInNS;
+ this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF;
+ this.enableRtcEventLog = enableRtcEventLog;
+ this.dataChannelParameters = dataChannelParameters;
+ }
+ }
+
+ /**
+ * Peer connection events.
+ */
+ public interface PeerConnectionEvents {
+ /**
+ * Callback fired once local SDP is created and set.
+ */
+ void onLocalDescription(final SessionDescription sdp);
+
+ /**
+ * Callback fired once local Ice candidate is generated.
+ */
+ void onIceCandidate(final IceCandidate candidate);
+
+ /**
+ * Callback fired once local ICE candidates are removed.
+ */
+ void onIceCandidatesRemoved(final IceCandidate[] candidates);
+
+ /**
+ * Callback fired once connection is established (IceConnectionState is
+ * CONNECTED).
+ */
+ void onIceConnected();
+
+ /**
+ * Callback fired once connection is disconnected (IceConnectionState is
+ * DISCONNECTED).
+ */
+ void onIceDisconnected();
+
+ /**
+ * Callback fired once DTLS connection is established (PeerConnectionState
+ * is CONNECTED).
+ */
+ void onConnected();
+
+ /**
+ * Callback fired once DTLS connection is disconnected (PeerConnectionState
+ * is DISCONNECTED).
+ */
+ void onDisconnected();
+
+ /**
+ * Callback fired once peer connection is closed.
+ */
+ void onPeerConnectionClosed();
+
+ /**
+ * Callback fired once peer connection statistics is ready.
+ */
+ void onPeerConnectionStatsReady(final RTCStatsReport report);
+
+ /**
+ * Callback fired once peer connection error happened.
+ */
+ void onPeerConnectionError(final String description);
+ }
+
+ /**
+ * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
+ * ownership of `eglBase`.
+ */
+ public PeerConnectionClient(Context appContext, EglBase eglBase,
+ PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {
+ this.rootEglBase = eglBase;
+ this.appContext = appContext;
+ this.events = events;
+ this.peerConnectionParameters = peerConnectionParameters;
+ this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;
+
+ Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters));
+
+ final String fieldTrials = getFieldTrials(peerConnectionParameters);
+ executor.execute(() -> {
+ Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials);
+ PeerConnectionFactory.initialize(
+ PeerConnectionFactory.InitializationOptions.builder(appContext)
+ .setFieldTrials(fieldTrials)
+ .setEnableInternalTracer(true)
+ .createInitializationOptions());
+ });
+ }
+
+ /**
+ * This function should only be called once.
+ */
+ public void createPeerConnectionFactory(PeerConnectionFactory.Options options) {
+ if (factory != null) {
+ throw new IllegalStateException("PeerConnectionFactory has already been constructed");
+ }
+ executor.execute(() -> createPeerConnectionFactoryInternal(options));
+ }
+
+ public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink,
+ final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+ if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
+ Log.w(TAG, "Video call enabled but no video capturer provided.");
+ }
+ createPeerConnection(
+ localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters);
+ }
+
+ public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks,
+ final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+ if (peerConnectionParameters == null) {
+ Log.e(TAG, "Creating peer connection without initializing factory.");
+ return;
+ }
+ this.localRender = localRender;
+ this.remoteSinks = remoteSinks;
+ this.videoCapturer = videoCapturer;
+ this.signalingParameters = signalingParameters;
+ executor.execute(() -> {
+ try {
+ createMediaConstraintsInternal();
+ createPeerConnectionInternal();
+ maybeCreateAndStartRtcEventLog();
+ } catch (Exception e) {
+ reportError("Failed to create peer connection: " + e.getMessage());
+ throw e;
+ }
+ });
+ }
+
+ public void close() {
+ executor.execute(this ::closeInternal);
+ }
+
+ private boolean isVideoCallEnabled() {
+ return peerConnectionParameters.videoCallEnabled && videoCapturer != null;
+ }
+
+ private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) {
+ isError = false;
+
+ if (peerConnectionParameters.tracing) {
+ PeerConnectionFactory.startInternalTracingCapture(
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ + "webrtc-trace.txt");
+ }
+
+ // Check if ISAC is used by default.
+ preferIsac = peerConnectionParameters.audioCodec != null
+ && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
+
+ // It is possible to save a copy in raw PCM format on a file by checking
+ // the "Save input audio to file" checkbox in the Settings UI. A callback
+ // interface is set when this flag is enabled. As a result, a copy of recorded
+ // audio samples are provided to this client directly from the native audio
+ // layer in Java.
+ if (peerConnectionParameters.saveInputAudioToFile) {
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.d(TAG, "Enable recording of microphone input audio to file");
+ saveRecordedAudioToFile = new RecordedAudioToFileController(executor);
+ } else {
+ // TODO(henrika): ensure that the UI reflects that if OpenSL ES is selected,
+ // then the "Save inut audio to file" option shall be grayed out.
+ Log.e(TAG, "Recording of input audio is not supported for OpenSL ES");
+ }
+ }
+
+ final AudioDeviceModule adm = createJavaAudioDevice();
+
+ // Create peer connection factory.
+ if (options != null) {
+ Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
+ }
+ final boolean enableH264HighProfile =
+ VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec);
+ final VideoEncoderFactory encoderFactory;
+ final VideoDecoderFactory decoderFactory;
+
+ if (peerConnectionParameters.videoCodecHwAcceleration) {
+ encoderFactory = new DefaultVideoEncoderFactory(
+ rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
+ decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
+ } else {
+ encoderFactory = new SoftwareVideoEncoderFactory();
+ decoderFactory = new SoftwareVideoDecoderFactory();
+ }
+
+ // Disable encryption for loopback calls.
+ if (peerConnectionParameters.loopback) {
+ options.disableEncryption = true;
+ }
+ factory = PeerConnectionFactory.builder()
+ .setOptions(options)
+ .setAudioDeviceModule(adm)
+ .setVideoEncoderFactory(encoderFactory)
+ .setVideoDecoderFactory(decoderFactory)
+ .createPeerConnectionFactory();
+ Log.d(TAG, "Peer connection factory created.");
+ adm.release();
+ }
+
+ AudioDeviceModule createJavaAudioDevice() {
+ // Enable/disable OpenSL ES playback.
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.w(TAG, "External OpenSLES ADM not implemented yet.");
+ // TODO(magjed): Add support for external OpenSLES ADM.
+ }
+
+ // Set audio record error callbacks.
+ AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() {
+ @Override
+ public void onWebRtcAudioRecordInitError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordStartError(
+ JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
+ reportError(errorMessage);
+ }
+ };
+
+ AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() {
+ @Override
+ public void onWebRtcAudioTrackInitError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioTrackStartError(
+ JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioTrackError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
+ reportError(errorMessage);
+ }
+ };
+
+ // Set audio record state callbacks.
+ AudioRecordStateCallback audioRecordStateCallback = new AudioRecordStateCallback() {
+ @Override
+ public void onWebRtcAudioRecordStart() {
+ Log.i(TAG, "Audio recording starts");
+ }
+
+ @Override
+ public void onWebRtcAudioRecordStop() {
+ Log.i(TAG, "Audio recording stops");
+ }
+ };
+
+ // Set audio track state callbacks.
+ AudioTrackStateCallback audioTrackStateCallback = new AudioTrackStateCallback() {
+ @Override
+ public void onWebRtcAudioTrackStart() {
+ Log.i(TAG, "Audio playout starts");
+ }
+
+ @Override
+ public void onWebRtcAudioTrackStop() {
+ Log.i(TAG, "Audio playout stops");
+ }
+ };
+
+ return JavaAudioDeviceModule.builder(appContext)
+ .setSamplesReadyCallback(saveRecordedAudioToFile)
+ .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
+ .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
+ .setAudioRecordErrorCallback(audioRecordErrorCallback)
+ .setAudioTrackErrorCallback(audioTrackErrorCallback)
+ .setAudioRecordStateCallback(audioRecordStateCallback)
+ .setAudioTrackStateCallback(audioTrackStateCallback)
+ .createAudioDeviceModule();
+ }
+
+ private void createMediaConstraintsInternal() {
+ // Create video constraints if video call is enabled.
+ if (isVideoCallEnabled()) {
+ videoWidth = peerConnectionParameters.videoWidth;
+ videoHeight = peerConnectionParameters.videoHeight;
+ videoFps = peerConnectionParameters.videoFps;
+
+ // If video resolution is not specified, default to HD.
+ if (videoWidth == 0 || videoHeight == 0) {
+ videoWidth = HD_VIDEO_WIDTH;
+ videoHeight = HD_VIDEO_HEIGHT;
+ }
+
+ // If fps is not specified, default to 30.
+ if (videoFps == 0) {
+ videoFps = 30;
+ }
+ Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
+ }
+
+ // Create audio constraints.
+ audioConstraints = new MediaConstraints();
+ // added for audio performance measurements
+ if (peerConnectionParameters.noAudioProcessing) {
+ Log.d(TAG, "Disabling audio processing");
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
+ }
+ // Create SDP constraints.
+ sdpMediaConstraints = new MediaConstraints();
+ sdpMediaConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
+ sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
+ "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled())));
+ }
+
+ private void createPeerConnectionInternal() {
+ if (factory == null || isError) {
+ Log.e(TAG, "Peerconnection factory is not created");
+ return;
+ }
+ Log.d(TAG, "Create peer connection.");
+
+ queuedRemoteCandidates = new ArrayList<>();
+
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
+ // TCP candidates are only useful when connecting to a server that supports
+ // ICE-TCP.
+ rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
+ rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
+ rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
+ rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+ // Use ECDSA encryption.
+ rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ peerConnection = factory.createPeerConnection(rtcConfig, pcObserver);
+
+ if (dataChannelEnabled) {
+ DataChannel.Init init = new DataChannel.Init();
+ init.ordered = peerConnectionParameters.dataChannelParameters.ordered;
+ init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated;
+ init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits;
+ init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs;
+ init.id = peerConnectionParameters.dataChannelParameters.id;
+ init.protocol = peerConnectionParameters.dataChannelParameters.protocol;
+ dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init);
+ }
+ isInitiator = false;
+
+ // Set INFO libjingle logging.
+ // NOTE: this _must_ happen while `factory` is alive!
+ Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
+
+ List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
+ if (isVideoCallEnabled()) {
+ peerConnection.addTrack(createVideoTrack(videoCapturer), mediaStreamLabels);
+ // We can add the renderers right away because we don't need to wait for an
+ // answer to get the remote track.
+ remoteVideoTrack = getRemoteVideoTrack();
+ remoteVideoTrack.setEnabled(renderVideo);
+ for (VideoSink remoteSink : remoteSinks) {
+ remoteVideoTrack.addSink(remoteSink);
+ }
+ }
+ peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
+ if (isVideoCallEnabled()) {
+ findVideoSender();
+ }
+
+ if (peerConnectionParameters.aecDump) {
+ try {
+ ParcelFileDescriptor aecDumpFileDescriptor =
+ ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+ + File.separator + "Download/audio.aecdump"),
+ ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+ | ParcelFileDescriptor.MODE_TRUNCATE);
+ factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1);
+ } catch (IOException e) {
+ Log.e(TAG, "Can not open aecdump file", e);
+ }
+ }
+
+ if (saveRecordedAudioToFile != null) {
+ if (saveRecordedAudioToFile.start()) {
+ Log.d(TAG, "Recording input audio to file is activated");
+ }
+ }
+ Log.d(TAG, "Peer connection created.");
+ }
+
+ private File createRtcEventLogOutputFile() {
+ DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault());
+ Date date = new Date();
+ final String outputFileName = "event_log_" + dateFormat.format(date) + ".log";
+ return new File(
+ appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName);
+ }
+
+ private void maybeCreateAndStartRtcEventLog() {
+ if (appContext == null || peerConnection == null) {
+ return;
+ }
+ if (!peerConnectionParameters.enableRtcEventLog) {
+ Log.d(TAG, "RtcEventLog is disabled.");
+ return;
+ }
+ rtcEventLog = new RtcEventLog(peerConnection);
+ rtcEventLog.start(createRtcEventLogOutputFile());
+ }
+
+ private void closeInternal() {
+ if (factory != null && peerConnectionParameters.aecDump) {
+ factory.stopAecDump();
+ }
+ Log.d(TAG, "Closing peer connection.");
+ statsTimer.cancel();
+ if (dataChannel != null) {
+ dataChannel.dispose();
+ dataChannel = null;
+ }
+ if (rtcEventLog != null) {
+ // RtcEventLog should stop before the peer connection is disposed.
+ rtcEventLog.stop();
+ rtcEventLog = null;
+ }
+ if (peerConnection != null) {
+ peerConnection.dispose();
+ peerConnection = null;
+ }
+ Log.d(TAG, "Closing audio source.");
+ if (audioSource != null) {
+ audioSource.dispose();
+ audioSource = null;
+ }
+ Log.d(TAG, "Stopping capture.");
+ if (videoCapturer != null) {
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ videoCapturerStopped = true;
+ videoCapturer.dispose();
+ videoCapturer = null;
+ }
+ Log.d(TAG, "Closing video source.");
+ if (videoSource != null) {
+ videoSource.dispose();
+ videoSource = null;
+ }
+ if (surfaceTextureHelper != null) {
+ surfaceTextureHelper.dispose();
+ surfaceTextureHelper = null;
+ }
+ if (saveRecordedAudioToFile != null) {
+ Log.d(TAG, "Closing audio file for recorded input audio.");
+ saveRecordedAudioToFile.stop();
+ saveRecordedAudioToFile = null;
+ }
+ localRender = null;
+ remoteSinks = null;
+ Log.d(TAG, "Closing peer connection factory.");
+ if (factory != null) {
+ factory.dispose();
+ factory = null;
+ }
+ rootEglBase.release();
+ Log.d(TAG, "Closing peer connection done.");
+ events.onPeerConnectionClosed();
+ PeerConnectionFactory.stopInternalTracingCapture();
+ PeerConnectionFactory.shutdownInternalTracer();
+ }
+
+ public boolean isHDVideo() {
+ return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720;
+ }
+
+ private void getStats() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ peerConnection.getStats(new RTCStatsCollectorCallback() {
+ @Override
+ public void onStatsDelivered(RTCStatsReport report) {
+ events.onPeerConnectionStatsReady(report);
+ }
+ });
+ }
+
+ public void enableStatsEvents(boolean enable, int periodMs) {
+ if (enable) {
+ try {
+ statsTimer.schedule(new TimerTask() {
+ @Override
+ public void run() {
+ executor.execute(() -> getStats());
+ }
+ }, 0, periodMs);
+ } catch (Exception e) {
+ Log.e(TAG, "Can not schedule statistics timer", e);
+ }
+ } else {
+ statsTimer.cancel();
+ }
+ }
+
+ public void setAudioEnabled(final boolean enable) {
+ executor.execute(() -> {
+ enableAudio = enable;
+ if (localAudioTrack != null) {
+ localAudioTrack.setEnabled(enableAudio);
+ }
+ });
+ }
+
+ public void setVideoEnabled(final boolean enable) {
+ executor.execute(() -> {
+ renderVideo = enable;
+ if (localVideoTrack != null) {
+ localVideoTrack.setEnabled(renderVideo);
+ }
+ if (remoteVideoTrack != null) {
+ remoteVideoTrack.setEnabled(renderVideo);
+ }
+ });
+ }
+
+ public void createOffer() {
+ executor.execute(() -> {
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "PC Create OFFER");
+ isInitiator = true;
+ peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
+ }
+ });
+ }
+
+ public void createAnswer() {
+ executor.execute(() -> {
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "PC create ANSWER");
+ isInitiator = false;
+ peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
+ }
+ });
+ }
+
+ public void addRemoteIceCandidate(final IceCandidate candidate) {
+ executor.execute(() -> {
+ if (peerConnection != null && !isError) {
+ if (queuedRemoteCandidates != null) {
+ queuedRemoteCandidates.add(candidate);
+ } else {
+ peerConnection.addIceCandidate(candidate, new AddIceObserver() {
+ @Override
+ public void onAddSuccess() {
+ Log.d(TAG, "Candidate " + candidate + " successfully added.");
+ }
+ @Override
+ public void onAddFailure(String error) {
+ Log.d(TAG, "Candidate " + candidate + " addition failed: " + error);
+ }
+ });
+ }
+ }
+ });
+ }
+
+ public void removeRemoteIceCandidates(final IceCandidate[] candidates) {
+ executor.execute(() -> {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ // Drain the queued remote candidates if there is any so that
+ // they are processed in the proper order.
+ drainCandidates();
+ peerConnection.removeIceCandidates(candidates);
+ });
+ }
+
+ public void setRemoteDescription(final SessionDescription desc) {
+ executor.execute(() -> {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ String sdp = desc.description;
+ if (preferIsac) {
+ sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true);
+ }
+ if (isVideoCallEnabled()) {
+ sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false);
+ }
+ if (peerConnectionParameters.audioStartBitrate > 0) {
+ sdp = setStartBitrate(
+ AUDIO_CODEC_OPUS, false, sdp, peerConnectionParameters.audioStartBitrate);
+ }
+ Log.d(TAG, "Set remote SDP.");
+ SessionDescription sdpRemote = new SessionDescription(desc.type, sdp);
+ peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
+ });
+ }
+
+ public void stopVideoSource() {
+ executor.execute(() -> {
+ if (videoCapturer != null && !videoCapturerStopped) {
+ Log.d(TAG, "Stop video source.");
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ }
+ videoCapturerStopped = true;
+ }
+ });
+ }
+
+ public void startVideoSource() {
+ executor.execute(() -> {
+ if (videoCapturer != null && videoCapturerStopped) {
+ Log.d(TAG, "Restart video source.");
+ videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
+ videoCapturerStopped = false;
+ }
+ });
+ }
+
+ public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps) {
+ executor.execute(() -> {
+ if (peerConnection == null || localVideoSender == null || isError) {
+ return;
+ }
+ Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps);
+ if (localVideoSender == null) {
+ Log.w(TAG, "Sender is not ready.");
+ return;
+ }
+
+ RtpParameters parameters = localVideoSender.getParameters();
+ if (parameters.encodings.size() == 0) {
+ Log.w(TAG, "RtpParameters are not ready.");
+ return;
+ }
+
+ for (RtpParameters.Encoding encoding : parameters.encodings) {
+ // Null value means no limit.
+ encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
+ }
+ if (!localVideoSender.setParameters(parameters)) {
+ Log.e(TAG, "RtpSender.setParameters failed.");
+ }
+ Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps);
+ });
+ }
+
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, "Peerconnection error: " + errorMessage);
+ executor.execute(() -> {
+ if (!isError) {
+ events.onPeerConnectionError(errorMessage);
+ isError = true;
+ }
+ });
+ }
+
+ @Nullable
+ private AudioTrack createAudioTrack() {
+ audioSource = factory.createAudioSource(audioConstraints);
+ localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
+ localAudioTrack.setEnabled(enableAudio);
+ return localAudioTrack;
+ }
+
+ @Nullable
+ private VideoTrack createVideoTrack(VideoCapturer capturer) {
+ surfaceTextureHelper =
+ SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
+ videoSource = factory.createVideoSource(capturer.isScreencast());
+ capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
+ capturer.startCapture(videoWidth, videoHeight, videoFps);
+
+ localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
+ localVideoTrack.setEnabled(renderVideo);
+ localVideoTrack.addSink(localRender);
+ return localVideoTrack;
+ }
+
+ private void findVideoSender() {
+ for (RtpSender sender : peerConnection.getSenders()) {
+ if (sender.track() != null) {
+ String trackType = sender.track().kind();
+ if (trackType.equals(VIDEO_TRACK_TYPE)) {
+ Log.d(TAG, "Found video sender.");
+ localVideoSender = sender;
+ }
+ }
+ }
+ }
+
+ // Returns the remote VideoTrack, assuming there is only one.
+ private @Nullable VideoTrack getRemoteVideoTrack() {
+ for (RtpTransceiver transceiver : peerConnection.getTransceivers()) {
+ MediaStreamTrack track = transceiver.getReceiver().track();
+ if (track instanceof VideoTrack) {
+ return (VideoTrack) track;
+ }
+ }
+ return null;
+ }
+
+ private static String getSdpVideoCodecName(PeerConnectionParameters parameters) {
+ switch (parameters.videoCodec) {
+ case VIDEO_CODEC_VP8:
+ return VIDEO_CODEC_VP8;
+ case VIDEO_CODEC_VP9:
+ return VIDEO_CODEC_VP9;
+ case VIDEO_CODEC_AV1:
+ return VIDEO_CODEC_AV1;
+ case VIDEO_CODEC_H264_HIGH:
+ case VIDEO_CODEC_H264_BASELINE:
+ return VIDEO_CODEC_H264;
+ default:
+ return VIDEO_CODEC_VP8;
+ }
+ }
+
+ private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) {
+ String fieldTrials = "";
+ if (peerConnectionParameters.videoFlexfecEnabled) {
+ fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL;
+ Log.d(TAG, "Enable FlexFEC field trial.");
+ }
+ if (peerConnectionParameters.disableWebRtcAGCAndHPF) {
+ fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
+ Log.d(TAG, "Disable WebRTC AGC field trial.");
+ }
+ return fieldTrials;
+ }
+
+ @SuppressWarnings("StringSplitter")
+ private static String setStartBitrate(
+ String codec, boolean isVideoCodec, String sdp, int bitrateKbps) {
+ String[] lines = sdp.split("\r\n");
+ int rtpmapLineIndex = -1;
+ boolean sdpFormatUpdated = false;
+ String codecRtpMap = null;
+ // Search for codec rtpmap in format
+ // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
+ String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
+ Pattern codecPattern = Pattern.compile(regex);
+ for (int i = 0; i < lines.length; i++) {
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ codecRtpMap = codecMatcher.group(1);
+ rtpmapLineIndex = i;
+ break;
+ }
+ }
+ if (codecRtpMap == null) {
+ Log.w(TAG, "No rtpmap for " + codec + " codec");
+ return sdp;
+ }
+ Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
+
+    // Check if a=fmtp string already exists in remote SDP for this codec and
+ // update it with new bitrate parameter.
+ regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$";
+ codecPattern = Pattern.compile(regex);
+ for (int i = 0; i < lines.length; i++) {
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ Log.d(TAG, "Found " + codec + " " + lines[i]);
+ if (isVideoCodec) {
+ lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+ } else {
+ lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
+ }
+ Log.d(TAG, "Update remote SDP line: " + lines[i]);
+ sdpFormatUpdated = true;
+ break;
+ }
+ }
+
+ StringBuilder newSdpDescription = new StringBuilder();
+ for (int i = 0; i < lines.length; i++) {
+ newSdpDescription.append(lines[i]).append("\r\n");
+      // Append new a=fmtp line if no such line exists for a codec.
+ if (!sdpFormatUpdated && i == rtpmapLineIndex) {
+ String bitrateSet;
+ if (isVideoCodec) {
+ bitrateSet =
+ "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+ } else {
+ bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+ + (bitrateKbps * 1000);
+ }
+ Log.d(TAG, "Add remote SDP line: " + bitrateSet);
+ newSdpDescription.append(bitrateSet).append("\r\n");
+ }
+ }
+ return newSdpDescription.toString();
+ }
+
+ /** Returns the line number containing "m=audio|video", or -1 if no such line exists. */
+ private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) {
+ final String mediaDescription = isAudio ? "m=audio " : "m=video ";
+ for (int i = 0; i < sdpLines.length; ++i) {
+ if (sdpLines[i].startsWith(mediaDescription)) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ private static String joinString(
+ Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd) {
+ Iterator<? extends CharSequence> iter = s.iterator();
+ if (!iter.hasNext()) {
+ return "";
+ }
+ StringBuilder buffer = new StringBuilder(iter.next());
+ while (iter.hasNext()) {
+ buffer.append(delimiter).append(iter.next());
+ }
+ if (delimiterAtEnd) {
+ buffer.append(delimiter);
+ }
+ return buffer.toString();
+ }
+
+ private static @Nullable String movePayloadTypesToFront(
+ List<String> preferredPayloadTypes, String mLine) {
+ // The format of the media description line should be: m=<media> <port> <proto> <fmt> ...
+ final List<String> origLineParts = Arrays.asList(mLine.split(" "));
+ if (origLineParts.size() <= 3) {
+ Log.e(TAG, "Wrong SDP media description format: " + mLine);
+ return null;
+ }
+ final List<String> header = origLineParts.subList(0, 3);
+ final List<String> unpreferredPayloadTypes =
+ new ArrayList<>(origLineParts.subList(3, origLineParts.size()));
+ unpreferredPayloadTypes.removeAll(preferredPayloadTypes);
+ // Reconstruct the line with `preferredPayloadTypes` moved to the beginning of the payload
+ // types.
+ final List<String> newLineParts = new ArrayList<>();
+ newLineParts.addAll(header);
+ newLineParts.addAll(preferredPayloadTypes);
+ newLineParts.addAll(unpreferredPayloadTypes);
+ return joinString(newLineParts, " ", false /* delimiterAtEnd */);
+ }
+
+ private static String preferCodec(String sdp, String codec, boolean isAudio) {
+ final String[] lines = sdp.split("\r\n");
+ final int mLineIndex = findMediaDescriptionLine(isAudio, lines);
+ if (mLineIndex == -1) {
+ Log.w(TAG, "No mediaDescription line, so can't prefer " + codec);
+ return sdp;
+ }
+ // A list with all the payload types with name `codec`. The payload types are integers in the
+ // range 96-127, but they are stored as strings here.
+ final List<String> codecPayloadTypes = new ArrayList<>();
+ // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
+ final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$");
+ for (String line : lines) {
+ Matcher codecMatcher = codecPattern.matcher(line);
+ if (codecMatcher.matches()) {
+ codecPayloadTypes.add(codecMatcher.group(1));
+ }
+ }
+ if (codecPayloadTypes.isEmpty()) {
+ Log.w(TAG, "No payload types with name " + codec);
+ return sdp;
+ }
+
+ final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]);
+ if (newMLine == null) {
+ return sdp;
+ }
+ Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine);
+ lines[mLineIndex] = newMLine;
+ return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */);
+ }
+
+ private void drainCandidates() {
+ if (queuedRemoteCandidates != null) {
+ Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
+ for (IceCandidate candidate : queuedRemoteCandidates) {
+ peerConnection.addIceCandidate(candidate, new AddIceObserver() {
+ @Override
+ public void onAddSuccess() {
+ Log.d(TAG, "Candidate " + candidate + " successfully added.");
+ }
+ @Override
+ public void onAddFailure(String error) {
+ Log.d(TAG, "Candidate " + candidate + " addition failed: " + error);
+ }
+ });
+ }
+ queuedRemoteCandidates = null;
+ }
+ }
+
+ private void switchCameraInternal() {
+ if (videoCapturer instanceof CameraVideoCapturer) {
+ if (!isVideoCallEnabled() || isError) {
+ Log.e(TAG,
+ "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError);
+ return; // No video is sent or only one camera is available or error happened.
+ }
+ Log.d(TAG, "Switch camera");
+ CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
+ cameraVideoCapturer.switchCamera(null);
+ } else {
+      Log.d(TAG, "Will not switch camera, video capturer is not a camera");
+ }
+ }
+
+ public void switchCamera() {
+ executor.execute(this ::switchCameraInternal);
+ }
+
+ public void changeCaptureFormat(final int width, final int height, final int framerate) {
+ executor.execute(() -> changeCaptureFormatInternal(width, height, framerate));
+ }
+
+ private void changeCaptureFormatInternal(int width, int height, int framerate) {
+ if (!isVideoCallEnabled() || isError || videoCapturer == null) {
+ Log.e(TAG,
+ "Failed to change capture format. Video: " + isVideoCallEnabled()
+ + ". Error : " + isError);
+ return;
+ }
+ Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ videoSource.adaptOutputFormat(width, height, framerate);
+ }
+
+ // Implementation detail: observe ICE & stream changes and react accordingly.
+ private class PCObserver implements PeerConnection.Observer {
+ @Override
+ public void onIceCandidate(final IceCandidate candidate) {
+ executor.execute(() -> events.onIceCandidate(candidate));
+ }
+
+ @Override
+ public void onIceCandidateError(final IceCandidateErrorEvent event) {
+ Log.d(TAG,
+ "IceCandidateError address: " + event.address + ", port: " + event.port + ", url: "
+ + event.url + ", errorCode: " + event.errorCode + ", errorText: " + event.errorText);
+ }
+
+ @Override
+ public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+ executor.execute(() -> events.onIceCandidatesRemoved(candidates));
+ }
+
+ @Override
+ public void onSignalingChange(PeerConnection.SignalingState newState) {
+ Log.d(TAG, "SignalingState: " + newState);
+ }
+
+ @Override
+ public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
+ executor.execute(() -> {
+ Log.d(TAG, "IceConnectionState: " + newState);
+ if (newState == IceConnectionState.CONNECTED) {
+ events.onIceConnected();
+ } else if (newState == IceConnectionState.DISCONNECTED) {
+ events.onIceDisconnected();
+ } else if (newState == IceConnectionState.FAILED) {
+ reportError("ICE connection failed.");
+ }
+ });
+ }
+
+ @Override
+ public void onConnectionChange(final PeerConnection.PeerConnectionState newState) {
+ executor.execute(() -> {
+ Log.d(TAG, "PeerConnectionState: " + newState);
+ if (newState == PeerConnectionState.CONNECTED) {
+ events.onConnected();
+ } else if (newState == PeerConnectionState.DISCONNECTED) {
+ events.onDisconnected();
+ } else if (newState == PeerConnectionState.FAILED) {
+ reportError("DTLS connection failed.");
+ }
+ });
+ }
+
+ @Override
+ public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
+ Log.d(TAG, "IceGatheringState: " + newState);
+ }
+
+ @Override
+ public void onIceConnectionReceivingChange(boolean receiving) {
+ Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
+ }
+
+ @Override
+ public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {
+ Log.d(TAG, "Selected candidate pair changed because: " + event);
+ }
+
+ @Override
+ public void onAddStream(final MediaStream stream) {}
+
+ @Override
+ public void onRemoveStream(final MediaStream stream) {}
+
+ @Override
+ public void onDataChannel(final DataChannel dc) {
+ Log.d(TAG, "New Data channel " + dc.label());
+
+ if (!dataChannelEnabled)
+ return;
+
+ dc.registerObserver(new DataChannel.Observer() {
+ @Override
+ public void onBufferedAmountChange(long previousAmount) {
+ Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state());
+ }
+
+ @Override
+ public void onStateChange() {
+ Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
+ }
+
+ @Override
+ public void onMessage(final DataChannel.Buffer buffer) {
+ if (buffer.binary) {
+ Log.d(TAG, "Received binary msg over " + dc);
+ return;
+ }
+ ByteBuffer data = buffer.data;
+ final byte[] bytes = new byte[data.capacity()];
+ data.get(bytes);
+ String strData = new String(bytes, Charset.forName("UTF-8"));
+ Log.d(TAG, "Got msg: " + strData + " over " + dc);
+ }
+ });
+ }
+
+ @Override
+ public void onRenegotiationNeeded() {
+ // No need to do anything; AppRTC follows a pre-agreed-upon
+ // signaling/negotiation protocol.
+ }
+
+ @Override
+ public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {}
+
+ @Override
+ public void onRemoveTrack(final RtpReceiver receiver) {}
+ }
+
+ // Implementation detail: handle offer creation/signaling and answer setting,
+ // as well as adding remote ICE candidates once the answer SDP is set.
+ private class SDPObserver implements SdpObserver {
+ @Override
+ public void onCreateSuccess(final SessionDescription desc) {
+ if (localDescription != null) {
+ reportError("Multiple SDP create.");
+ return;
+ }
+ String sdp = desc.description;
+ if (preferIsac) {
+ sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true);
+ }
+ if (isVideoCallEnabled()) {
+ sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false);
+ }
+ final SessionDescription newDesc = new SessionDescription(desc.type, sdp);
+ localDescription = newDesc;
+ executor.execute(() -> {
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "Set local SDP from " + desc.type);
+ peerConnection.setLocalDescription(sdpObserver, newDesc);
+ }
+ });
+ }
+
+ @Override
+ public void onSetSuccess() {
+ executor.execute(() -> {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ if (isInitiator) {
+ // For offering peer connection we first create offer and set
+ // local SDP, then after receiving answer set remote SDP.
+ if (peerConnection.getRemoteDescription() == null) {
+ // We've just set our local SDP so time to send it.
+            Log.d(TAG, "Local SDP set successfully");
+ events.onLocalDescription(localDescription);
+ } else {
+ // We've just set remote description, so drain remote
+ // and send local ICE candidates.
+            Log.d(TAG, "Remote SDP set successfully");
+ drainCandidates();
+ }
+ } else {
+ // For answering peer connection we set remote SDP and then
+ // create answer and set local SDP.
+ if (peerConnection.getLocalDescription() != null) {
+ // We've just set our local SDP so time to send it, drain
+ // remote and send local ICE candidates.
+            Log.d(TAG, "Local SDP set successfully");
+ events.onLocalDescription(localDescription);
+ drainCandidates();
+ } else {
+ // We've just set remote SDP - do nothing for now -
+ // answer will be created soon.
+            Log.d(TAG, "Remote SDP set successfully");
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onCreateFailure(final String error) {
+ reportError("createSDP error: " + error);
+ }
+
+ @Override
+ public void onSetFailure(final String error) {
+ reportError("setSDP error: " + error);
+ }
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java
new file mode 100644
index 0000000000..9787852feb
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.media.AudioFormat;
+import android.os.Environment;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.concurrent.ExecutorService;
+import org.webrtc.audio.JavaAudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
+
+/**
+ * Implements the AudioRecordSamplesReadyCallback interface and writes
+ * recorded raw audio samples to an output file.
+ */
+public class RecordedAudioToFileController implements SamplesReadyCallback {
+ private static final String TAG = "RecordedAudioToFile";
+ private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L;
+
+ private final Object lock = new Object();
+ private final ExecutorService executor;
+ @Nullable private OutputStream rawAudioFileOutputStream;
+ private boolean isRunning;
+ private long fileSizeInBytes;
+
+ public RecordedAudioToFileController(ExecutorService executor) {
+ Log.d(TAG, "ctor");
+ this.executor = executor;
+ }
+
+ /**
+ * Should be called on the same executor thread as the one provided at
+ * construction.
+ */
+ public boolean start() {
+ Log.d(TAG, "start");
+ if (!isExternalStorageWritable()) {
+ Log.e(TAG, "Writing to external media is not possible");
+ return false;
+ }
+ synchronized (lock) {
+ isRunning = true;
+ }
+ return true;
+ }
+
+ /**
+ * Should be called on the same executor thread as the one provided at
+ * construction.
+ */
+ public void stop() {
+ Log.d(TAG, "stop");
+ synchronized (lock) {
+ isRunning = false;
+ if (rawAudioFileOutputStream != null) {
+ try {
+ rawAudioFileOutputStream.close();
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to close file with saved input audio: " + e);
+ }
+ rawAudioFileOutputStream = null;
+ }
+ fileSizeInBytes = 0;
+ }
+ }
+
+ // Checks if external storage is available for read and write.
+ private boolean isExternalStorageWritable() {
+ String state = Environment.getExternalStorageState();
+ if (Environment.MEDIA_MOUNTED.equals(state)) {
+ return true;
+ }
+ return false;
+ }
+
+ // Utilizes audio parameters to create a file name which contains sufficient
+ // information so that the file can be played using an external file player.
+ // Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm.
+ private void openRawAudioOutputFile(int sampleRate, int channelCount) {
+ final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator
+ + "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz"
+ + ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm";
+ final File outputFile = new File(fileName);
+ try {
+ rawAudioFileOutputStream = new FileOutputStream(outputFile);
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Failed to open audio output file: " + e.getMessage());
+ }
+ Log.d(TAG, "Opened file for recording: " + fileName);
+ }
+
+ // Called when new audio samples are ready.
+ @Override
+ public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
+ // The native audio layer on Android should use 16-bit PCM format.
+ if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
+ Log.e(TAG, "Invalid audio format");
+ return;
+ }
+ synchronized (lock) {
+ // Abort early if stop() has been called.
+ if (!isRunning) {
+ return;
+ }
+ // Open a new file for the first callback only since it allows us to add audio parameters to
+ // the file name.
+ if (rawAudioFileOutputStream == null) {
+ openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount());
+ fileSizeInBytes = 0;
+ }
+ }
+ // Append the recorded 16-bit audio samples to the open output file.
+ executor.execute(() -> {
+ if (rawAudioFileOutputStream != null) {
+ try {
+ // Set a limit on max file size. 58348800 bytes corresponds to
+ // approximately 10 minutes of recording in mono at 48kHz.
+ if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) {
+ // Writes samples.getData().length bytes to output stream.
+ rawAudioFileOutputStream.write(samples.getData());
+ fileSizeInBytes += samples.getData().length;
+ }
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to write audio to file: " + e.getMessage());
+ }
+ }
+ });
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
new file mode 100644
index 0000000000..6a0f235528
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.util.Log;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Scanner;
+import java.util.List;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.util.AsyncHttpURLConnection;
+import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.SessionDescription;
+
+/**
+ * AsyncTask that converts an AppRTC room URL into the set of signaling
+ * parameters to use with that room.
+ */
+public class RoomParametersFetcher {
+ private static final String TAG = "RoomRTCClient";
+ private static final int TURN_HTTP_TIMEOUT_MS = 5000;
+ private final RoomParametersFetcherEvents events;
+ private final String roomUrl;
+ private final String roomMessage;
+
+ /**
+ * Room parameters fetcher callbacks.
+ */
+ public interface RoomParametersFetcherEvents {
+ /**
+ * Callback fired once the room's signaling parameters
+ * SignalingParameters are extracted.
+ */
+ void onSignalingParametersReady(final SignalingParameters params);
+
+ /**
+ * Callback for room parameters extraction error.
+ */
+ void onSignalingParametersError(final String description);
+ }
+
+ public RoomParametersFetcher(
+ String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
+ this.roomUrl = roomUrl;
+ this.roomMessage = roomMessage;
+ this.events = events;
+ }
+
+ public void makeRequest() {
+ Log.d(TAG, "Connecting to room: " + roomUrl);
+ AsyncHttpURLConnection httpConnection =
+ new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
+ @Override
+ public void onHttpError(String errorMessage) {
+ Log.e(TAG, "Room connection error: " + errorMessage);
+ events.onSignalingParametersError(errorMessage);
+ }
+
+ @Override
+ public void onHttpComplete(String response) {
+ roomHttpResponseParse(response);
+ }
+ });
+ httpConnection.send();
+ }
+
+ private void roomHttpResponseParse(String response) {
+ Log.d(TAG, "Room response: " + response);
+ try {
+ List<IceCandidate> iceCandidates = null;
+ SessionDescription offerSdp = null;
+ JSONObject roomJson = new JSONObject(response);
+
+ String result = roomJson.getString("result");
+ if (!result.equals("SUCCESS")) {
+ events.onSignalingParametersError("Room response error: " + result);
+ return;
+ }
+ response = roomJson.getString("params");
+ roomJson = new JSONObject(response);
+ String roomId = roomJson.getString("room_id");
+ String clientId = roomJson.getString("client_id");
+ String wssUrl = roomJson.getString("wss_url");
+ String wssPostUrl = roomJson.getString("wss_post_url");
+ boolean initiator = (roomJson.getBoolean("is_initiator"));
+ if (!initiator) {
+ iceCandidates = new ArrayList<>();
+ String messagesString = roomJson.getString("messages");
+ JSONArray messages = new JSONArray(messagesString);
+ for (int i = 0; i < messages.length(); ++i) {
+ String messageString = messages.getString(i);
+ JSONObject message = new JSONObject(messageString);
+ String messageType = message.getString("type");
+ Log.d(TAG, "GAE->C #" + i + " : " + messageString);
+ if (messageType.equals("offer")) {
+ offerSdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
+ } else if (messageType.equals("candidate")) {
+ IceCandidate candidate = new IceCandidate(
+ message.getString("id"), message.getInt("label"), message.getString("candidate"));
+ iceCandidates.add(candidate);
+ } else {
+ Log.e(TAG, "Unknown message: " + messageString);
+ }
+ }
+ }
+ Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
+ Log.d(TAG, "Initiator: " + initiator);
+ Log.d(TAG, "WSS url: " + wssUrl);
+ Log.d(TAG, "WSS POST url: " + wssPostUrl);
+
+ List<PeerConnection.IceServer> iceServers =
+ iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
+ boolean isTurnPresent = false;
+ for (PeerConnection.IceServer server : iceServers) {
+ Log.d(TAG, "IceServer: " + server);
+ for (String uri : server.urls) {
+ if (uri.startsWith("turn:")) {
+ isTurnPresent = true;
+ break;
+ }
+ }
+ }
+ // Request TURN servers.
+ if (!isTurnPresent && !roomJson.optString("ice_server_url").isEmpty()) {
+ List<PeerConnection.IceServer> turnServers =
+ requestTurnServers(roomJson.getString("ice_server_url"));
+ for (PeerConnection.IceServer turnServer : turnServers) {
+ Log.d(TAG, "TurnServer: " + turnServer);
+ iceServers.add(turnServer);
+ }
+ }
+
+ SignalingParameters params = new SignalingParameters(
+ iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
+ events.onSignalingParametersReady(params);
+ } catch (JSONException e) {
+ events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
+ } catch (IOException e) {
+ events.onSignalingParametersError("Room IO error: " + e.toString());
+ }
+ }
+
+ // Requests & returns a TURN ICE Server based on a request URL. Must be run
+ // off the main thread!
+ @SuppressWarnings("UseNetworkAnnotations")
+ private List<PeerConnection.IceServer> requestTurnServers(String url)
+ throws IOException, JSONException {
+ List<PeerConnection.IceServer> turnServers = new ArrayList<>();
+ Log.d(TAG, "Request TURN from: " + url);
+ HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
+ connection.setDoOutput(true);
+ connection.setRequestProperty("REFERER", "https://appr.tc");
+ connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
+ connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
+ int responseCode = connection.getResponseCode();
+ if (responseCode != 200) {
+ throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+ + connection.getHeaderField(null));
+ }
+ InputStream responseStream = connection.getInputStream();
+ String response = drainStream(responseStream);
+ connection.disconnect();
+ Log.d(TAG, "TURN response: " + response);
+ JSONObject responseJSON = new JSONObject(response);
+ JSONArray iceServers = responseJSON.getJSONArray("iceServers");
+ for (int i = 0; i < iceServers.length(); ++i) {
+ JSONObject server = iceServers.getJSONObject(i);
+ JSONArray turnUrls = server.getJSONArray("urls");
+ String username = server.has("username") ? server.getString("username") : "";
+ String credential = server.has("credential") ? server.getString("credential") : "";
+ for (int j = 0; j < turnUrls.length(); j++) {
+ String turnUrl = turnUrls.getString(j);
+ PeerConnection.IceServer turnServer =
+ PeerConnection.IceServer.builder(turnUrl)
+ .setUsername(username)
+ .setPassword(credential)
+ .createIceServer();
+ turnServers.add(turnServer);
+ }
+ }
+ return turnServers;
+ }
+
+ // Return the list of ICE servers described by a WebRTCPeerConnection
+ // configuration string.
+ private List<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
+ throws JSONException {
+ JSONObject json = new JSONObject(pcConfig);
+ JSONArray servers = json.getJSONArray("iceServers");
+ List<PeerConnection.IceServer> ret = new ArrayList<>();
+ for (int i = 0; i < servers.length(); ++i) {
+ JSONObject server = servers.getJSONObject(i);
+ String url = server.getString("urls");
+ String credential = server.has("credential") ? server.getString("credential") : "";
+ PeerConnection.IceServer turnServer =
+ PeerConnection.IceServer.builder(url)
+ .setPassword(credential)
+ .createIceServer();
+ ret.add(turnServer);
+ }
+ return ret;
+ }
+
+ // Return the contents of an InputStream as a String.
+ private static String drainStream(InputStream in) {
+ Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
+ return s.hasNext() ? s.next() : "";
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java
new file mode 100644
index 0000000000..103ad10f0b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/RtcEventLog.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+import java.io.File;
+import java.io.IOException;
+import org.webrtc.PeerConnection;
+
+public class RtcEventLog {
+ private static final String TAG = "RtcEventLog";
+ private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000;
+ private final PeerConnection peerConnection;
+ private RtcEventLogState state = RtcEventLogState.INACTIVE;
+
+ enum RtcEventLogState {
+ INACTIVE,
+ STARTED,
+ STOPPED,
+ }
+
+ public RtcEventLog(PeerConnection peerConnection) {
+ if (peerConnection == null) {
+ throw new NullPointerException("The peer connection is null.");
+ }
+ this.peerConnection = peerConnection;
+ }
+
+ public void start(final File outputFile) {
+ if (state == RtcEventLogState.STARTED) {
+ Log.e(TAG, "RtcEventLog has already started.");
+ return;
+ }
+ final ParcelFileDescriptor fileDescriptor;
+ try {
+ fileDescriptor = ParcelFileDescriptor.open(outputFile,
+ ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+ | ParcelFileDescriptor.MODE_TRUNCATE);
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to create a new file", e);
+ return;
+ }
+
+ // Passes ownership of the file to WebRTC.
+ boolean success =
+ peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES);
+ if (!success) {
+ Log.e(TAG, "Failed to start RTC event log.");
+ return;
+ }
+ state = RtcEventLogState.STARTED;
+ Log.d(TAG, "RtcEventLog started.");
+ }
+
+ public void stop() {
+ if (state != RtcEventLogState.STARTED) {
+ Log.e(TAG, "RtcEventLog was not started.");
+ return;
+ }
+ peerConnection.stopRtcEventLog();
+ state = RtcEventLogState.STOPPED;
+ Log.d(TAG, "RtcEventLog stopped.");
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
new file mode 100644
index 0000000000..e9c6f6b798
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
@@ -0,0 +1,317 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Activity;
+import android.content.SharedPreferences;
+import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
+import android.os.Bundle;
+import android.preference.ListPreference;
+import android.preference.Preference;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.audio.JavaAudioDeviceModule;
+
+/**
+ * Settings activity for AppRTC.
+ */
/**
 * Settings activity for AppRTC. Hosts a {@link SettingsFragment}, keeps every
 * preference's summary in sync with its stored value, and disables options
 * the current device cannot support (Camera2, built-in AEC/AGC/NS).
 */
public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
  private SettingsFragment settingsFragment;
  // Preference keys, loaded from string resources in onCreate().
  private String keyprefVideoCall;
  private String keyprefScreencapture;
  private String keyprefCamera2;
  private String keyprefResolution;
  private String keyprefFps;
  private String keyprefCaptureQualitySlider;
  private String keyprefMaxVideoBitrateType;
  private String keyprefMaxVideoBitrateValue;
  private String keyPrefVideoCodec;
  private String keyprefHwCodec;
  private String keyprefCaptureToTexture;
  private String keyprefFlexfec;

  private String keyprefStartAudioBitrateType;
  private String keyprefStartAudioBitrateValue;
  private String keyPrefAudioCodec;
  private String keyprefNoAudioProcessing;
  private String keyprefAecDump;
  private String keyprefEnableSaveInputAudioToFile;
  private String keyprefOpenSLES;
  private String keyprefDisableBuiltInAEC;
  private String keyprefDisableBuiltInAGC;
  private String keyprefDisableBuiltInNS;
  private String keyprefDisableWebRtcAGCAndHPF;
  private String keyprefSpeakerphone;

  private String keyPrefRoomServerUrl;
  private String keyPrefDisplayHud;
  private String keyPrefTracing;
  private String keyprefEnabledRtcEventLog;

  private String keyprefEnableDataChannel;
  private String keyprefOrdered;
  private String keyprefMaxRetransmitTimeMs;
  private String keyprefMaxRetransmits;
  private String keyprefDataProtocol;
  private String keyprefNegotiated;
  private String keyprefDataId;

  // Resolves all preference keys and installs the settings fragment.
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    keyprefVideoCall = getString(R.string.pref_videocall_key);
    keyprefScreencapture = getString(R.string.pref_screencapture_key);
    keyprefCamera2 = getString(R.string.pref_camera2_key);
    keyprefResolution = getString(R.string.pref_resolution_key);
    keyprefFps = getString(R.string.pref_fps_key);
    keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
    keyprefMaxVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
    keyprefMaxVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
    keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
    keyprefHwCodec = getString(R.string.pref_hwcodec_key);
    keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
    keyprefFlexfec = getString(R.string.pref_flexfec_key);

    keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
    keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
    keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
    keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
    keyprefAecDump = getString(R.string.pref_aecdump_key);
    keyprefEnableSaveInputAudioToFile =
        getString(R.string.pref_enable_save_input_audio_to_file_key);
    keyprefOpenSLES = getString(R.string.pref_opensles_key);
    keyprefDisableBuiltInAEC = getString(R.string.pref_disable_built_in_aec_key);
    keyprefDisableBuiltInAGC = getString(R.string.pref_disable_built_in_agc_key);
    keyprefDisableBuiltInNS = getString(R.string.pref_disable_built_in_ns_key);
    keyprefDisableWebRtcAGCAndHPF = getString(R.string.pref_disable_webrtc_agc_and_hpf_key);
    keyprefSpeakerphone = getString(R.string.pref_speakerphone_key);

    keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
    keyprefOrdered = getString(R.string.pref_ordered_key);
    keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
    keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
    keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
    keyprefNegotiated = getString(R.string.pref_negotiated_key);
    keyprefDataId = getString(R.string.pref_data_id_key);

    keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
    keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
    keyPrefTracing = getString(R.string.pref_tracing_key);
    keyprefEnabledRtcEventLog = getString(R.string.pref_enable_rtceventlog_key);

    // Display the fragment as the main content.
    settingsFragment = new SettingsFragment();
    getFragmentManager()
        .beginTransaction()
        .replace(android.R.id.content, settingsFragment)
        .commit();
  }

  // Registers the change listener, refreshes every summary from stored
  // values, and greys out options unsupported on this device.
  @Override
  protected void onResume() {
    super.onResume();
    // Set summary to be the user-description for the selected value
    SharedPreferences sharedPreferences =
        settingsFragment.getPreferenceScreen().getSharedPreferences();
    sharedPreferences.registerOnSharedPreferenceChangeListener(this);
    updateSummaryB(sharedPreferences, keyprefVideoCall);
    updateSummaryB(sharedPreferences, keyprefScreencapture);
    updateSummaryB(sharedPreferences, keyprefCamera2);
    updateSummary(sharedPreferences, keyprefResolution);
    updateSummary(sharedPreferences, keyprefFps);
    updateSummaryB(sharedPreferences, keyprefCaptureQualitySlider);
    updateSummary(sharedPreferences, keyprefMaxVideoBitrateType);
    updateSummaryBitrate(sharedPreferences, keyprefMaxVideoBitrateValue);
    setVideoBitrateEnable(sharedPreferences);
    updateSummary(sharedPreferences, keyPrefVideoCodec);
    updateSummaryB(sharedPreferences, keyprefHwCodec);
    updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
    updateSummaryB(sharedPreferences, keyprefFlexfec);

    updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
    updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
    setAudioBitrateEnable(sharedPreferences);
    updateSummary(sharedPreferences, keyPrefAudioCodec);
    updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
    updateSummaryB(sharedPreferences, keyprefAecDump);
    updateSummaryB(sharedPreferences, keyprefEnableSaveInputAudioToFile);
    updateSummaryB(sharedPreferences, keyprefOpenSLES);
    updateSummaryB(sharedPreferences, keyprefDisableBuiltInAEC);
    updateSummaryB(sharedPreferences, keyprefDisableBuiltInAGC);
    updateSummaryB(sharedPreferences, keyprefDisableBuiltInNS);
    updateSummaryB(sharedPreferences, keyprefDisableWebRtcAGCAndHPF);
    updateSummaryList(sharedPreferences, keyprefSpeakerphone);

    updateSummaryB(sharedPreferences, keyprefEnableDataChannel);
    updateSummaryB(sharedPreferences, keyprefOrdered);
    updateSummary(sharedPreferences, keyprefMaxRetransmitTimeMs);
    updateSummary(sharedPreferences, keyprefMaxRetransmits);
    updateSummary(sharedPreferences, keyprefDataProtocol);
    updateSummaryB(sharedPreferences, keyprefNegotiated);
    updateSummary(sharedPreferences, keyprefDataId);
    setDataChannelEnable(sharedPreferences);

    updateSummary(sharedPreferences, keyPrefRoomServerUrl);
    updateSummaryB(sharedPreferences, keyPrefDisplayHud);
    updateSummaryB(sharedPreferences, keyPrefTracing);
    updateSummaryB(sharedPreferences, keyprefEnabledRtcEventLog);

    if (!Camera2Enumerator.isSupported(this)) {
      Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);

      camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
      camera2Preference.setEnabled(false);
    }

    if (!JavaAudioDeviceModule.isBuiltInAcousticEchoCancelerSupported()) {
      Preference disableBuiltInAECPreference =
          settingsFragment.findPreference(keyprefDisableBuiltInAEC);

      disableBuiltInAECPreference.setSummary(getString(R.string.pref_built_in_aec_not_available));
      disableBuiltInAECPreference.setEnabled(false);
    }

    // NOTE(review): unlike AEC/NS above, the built-in AGC preference is
    // disabled unconditionally (no support check). This looks intentional
    // (built-in AGC support removed upstream) — confirm before "fixing".
    Preference disableBuiltInAGCPreference =
        settingsFragment.findPreference(keyprefDisableBuiltInAGC);

    disableBuiltInAGCPreference.setSummary(getString(R.string.pref_built_in_agc_not_available));
    disableBuiltInAGCPreference.setEnabled(false);

    if (!JavaAudioDeviceModule.isBuiltInNoiseSuppressorSupported()) {
      Preference disableBuiltInNSPreference =
          settingsFragment.findPreference(keyprefDisableBuiltInNS);

      disableBuiltInNSPreference.setSummary(getString(R.string.pref_built_in_ns_not_available));
      disableBuiltInNSPreference.setEnabled(false);
    }
  }

  // Stops listening for preference changes while the activity is not visible.
  @Override
  protected void onPause() {
    super.onPause();
    SharedPreferences sharedPreferences =
        settingsFragment.getPreferenceScreen().getSharedPreferences();
    sharedPreferences.unregisterOnSharedPreferenceChangeListener(this);
  }

  // Dispatches a changed key to the right summary-update helper, then
  // re-evaluates the enabled state of dependent bitrate/data-channel prefs.
  @Override
  public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
    // clang-format off
    if (key.equals(keyprefResolution)
        || key.equals(keyprefFps)
        || key.equals(keyprefMaxVideoBitrateType)
        || key.equals(keyPrefVideoCodec)
        || key.equals(keyprefStartAudioBitrateType)
        || key.equals(keyPrefAudioCodec)
        || key.equals(keyPrefRoomServerUrl)
        || key.equals(keyprefMaxRetransmitTimeMs)
        || key.equals(keyprefMaxRetransmits)
        || key.equals(keyprefDataProtocol)
        || key.equals(keyprefDataId)) {
      updateSummary(sharedPreferences, key);
    } else if (key.equals(keyprefMaxVideoBitrateValue)
        || key.equals(keyprefStartAudioBitrateValue)) {
      updateSummaryBitrate(sharedPreferences, key);
    } else if (key.equals(keyprefVideoCall)
        || key.equals(keyprefScreencapture)
        || key.equals(keyprefCamera2)
        || key.equals(keyPrefTracing)
        || key.equals(keyprefCaptureQualitySlider)
        || key.equals(keyprefHwCodec)
        || key.equals(keyprefCaptureToTexture)
        || key.equals(keyprefFlexfec)
        || key.equals(keyprefNoAudioProcessing)
        || key.equals(keyprefAecDump)
        || key.equals(keyprefEnableSaveInputAudioToFile)
        || key.equals(keyprefOpenSLES)
        || key.equals(keyprefDisableBuiltInAEC)
        || key.equals(keyprefDisableBuiltInAGC)
        || key.equals(keyprefDisableBuiltInNS)
        || key.equals(keyprefDisableWebRtcAGCAndHPF)
        || key.equals(keyPrefDisplayHud)
        || key.equals(keyprefEnableDataChannel)
        || key.equals(keyprefOrdered)
        || key.equals(keyprefNegotiated)
        || key.equals(keyprefEnabledRtcEventLog)) {
      updateSummaryB(sharedPreferences, key);
    } else if (key.equals(keyprefSpeakerphone)) {
      updateSummaryList(sharedPreferences, key);
    }
    // clang-format on
    if (key.equals(keyprefMaxVideoBitrateType)) {
      setVideoBitrateEnable(sharedPreferences);
    }
    if (key.equals(keyprefStartAudioBitrateType)) {
      setAudioBitrateEnable(sharedPreferences);
    }
    if (key.equals(keyprefEnableDataChannel)) {
      setDataChannelEnable(sharedPreferences);
    }
  }

  // Shows a string preference's raw stored value as its summary.
  private void updateSummary(SharedPreferences sharedPreferences, String key) {
    Preference updatedPref = settingsFragment.findPreference(key);
    // Set summary to be the user-description for the selected value
    updatedPref.setSummary(sharedPreferences.getString(key, ""));
  }

  // Like updateSummary(), but appends a "kbps" unit for bitrate values.
  private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
    Preference updatedPref = settingsFragment.findPreference(key);
    updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
  }

  // Shows a boolean preference as the localized "enabled"/"disabled" text.
  private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
    Preference updatedPref = settingsFragment.findPreference(key);
    updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
        ? getString(R.string.pref_value_enabled)
        : getString(R.string.pref_value_disabled));
  }

  // Shows a ListPreference's currently selected entry label as its summary.
  private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
    ListPreference updatedPref = (ListPreference) settingsFragment.findPreference(key);
    updatedPref.setSummary(updatedPref.getEntry());
  }

  // The manual video-bitrate field is only editable when the bitrate type is
  // not the default ("auto") choice.
  private void setVideoBitrateEnable(SharedPreferences sharedPreferences) {
    Preference bitratePreferenceValue =
        settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
    String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
    String bitrateType =
        sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
    if (bitrateType.equals(bitrateTypeDefault)) {
      bitratePreferenceValue.setEnabled(false);
    } else {
      bitratePreferenceValue.setEnabled(true);
    }
  }

  // Same gating as setVideoBitrateEnable(), for the audio start bitrate.
  private void setAudioBitrateEnable(SharedPreferences sharedPreferences) {
    Preference bitratePreferenceValue =
        settingsFragment.findPreference(keyprefStartAudioBitrateValue);
    String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
    String bitrateType =
        sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
    if (bitrateType.equals(bitrateTypeDefault)) {
      bitratePreferenceValue.setEnabled(false);
    } else {
      bitratePreferenceValue.setEnabled(true);
    }
  }

  // All data-channel sub-options track the master enable toggle.
  private void setDataChannelEnable(SharedPreferences sharedPreferences) {
    boolean enabled = sharedPreferences.getBoolean(keyprefEnableDataChannel, true);
    settingsFragment.findPreference(keyprefOrdered).setEnabled(enabled);
    settingsFragment.findPreference(keyprefMaxRetransmitTimeMs).setEnabled(enabled);
    settingsFragment.findPreference(keyprefMaxRetransmits).setEnabled(enabled);
    settingsFragment.findPreference(keyprefDataProtocol).setEnabled(enabled);
    settingsFragment.findPreference(keyprefNegotiated).setEnabled(enabled);
    settingsFragment.findPreference(keyprefDataId).setEnabled(enabled);
  }
}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
new file mode 100644
index 0000000000..d969bd7d32
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.Bundle;
+import android.preference.PreferenceFragment;
+
+/**
+ * Settings fragment for AppRTC.
+ */
/**
 * Settings fragment for AppRTC. Inflates the preference hierarchy from
 * R.xml.preferences; all summary/enable logic lives in SettingsActivity.
 */
public class SettingsFragment extends PreferenceFragment {
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Load the preferences from an XML resource
    addPreferencesFromResource(R.xml.preferences);
  }
}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
new file mode 100644
index 0000000000..d869d7ca66
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
@@ -0,0 +1,362 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.util.Log;
+import androidx.annotation.Nullable;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.util.concurrent.ExecutorService;
+import org.webrtc.ThreadUtils;
+
+/**
+ * Replacement for WebSocketChannelClient for direct communication between two IP addresses. Handles
+ * the signaling between the two clients using a TCP connection.
+ * <p>
+ * All public methods should be called from a looper executor thread
+ * passed in a constructor, otherwise exception will be thrown.
+ * All events are dispatched on the same thread.
+ */
public class TCPChannelClient {
  private static final String TAG = "TCPChannelClient";

  // Executor on which all TCPChannelEvents callbacks are dispatched.
  private final ExecutorService executor;
  // Verifies that the public API is only used from the executor thread.
  private final ThreadUtils.ThreadChecker executorThreadCheck;
  private final TCPChannelEvents eventListener;
  // Server or client socket, chosen in the constructor based on the address.
  // NOTE(review): if the address fails to resolve, this stays null and a
  // later disconnect()/send() will NPE — presumably callers stop using the
  // object after onTCPError; confirm.
  private TCPSocket socket;

  /**
   * Callback interface for messages delivered on TCP Connection. All callbacks are invoked from the
   * looper executor thread.
   */
  public interface TCPChannelEvents {
    void onTCPConnected(boolean server);
    void onTCPMessage(String message);
    void onTCPError(String description);
    void onTCPClose();
  }

  /**
   * Initializes the TCPChannelClient. If IP is a local IP address, starts a listening server on
   * that IP. If not, instead connects to the IP.
   *
   * @param eventListener Listener that will receive events from the client.
   * @param ip IP address to listen on or connect to.
   * @param port Port to listen on or connect to.
   */
  public TCPChannelClient(
      ExecutorService executor, TCPChannelEvents eventListener, String ip, int port) {
    this.executor = executor;
    executorThreadCheck = new ThreadUtils.ThreadChecker();
    // Detach so the checker binds to whichever thread first calls a public
    // method (the executor thread), not the constructing thread.
    executorThreadCheck.detachThread();
    this.eventListener = eventListener;

    InetAddress address;
    try {
      address = InetAddress.getByName(ip);
    } catch (UnknownHostException e) {
      reportError("Invalid IP address.");
      return;
    }

    // A wildcard/local "any" address means we act as the listening server;
    // otherwise we connect out to the peer.
    if (address.isAnyLocalAddress()) {
      socket = new TCPSocketServer(address, port);
    } else {
      socket = new TCPSocketClient(address, port);
    }

    // Starts the listening Thread (TCPSocket extends Thread).
    socket.start();
  }

  /**
   * Disconnects the client if not already disconnected. This will fire the onTCPClose event.
   */
  public void disconnect() {
    executorThreadCheck.checkIsOnValidThread();

    socket.disconnect();
  }

  /**
   * Sends a message on the socket.
   *
   * @param message Message to be sent.
   */
  public void send(String message) {
    executorThreadCheck.checkIsOnValidThread();

    socket.send(message);
  }

  /**
   * Helper method for firing onTCPError events. Calls onTCPError on the executor thread.
   */
  private void reportError(final String message) {
    Log.e(TAG, "TCP Error: " + message);
    executor.execute(new Runnable() {
      @Override
      public void run() {
        eventListener.onTCPError(message);
      }
    });
  }

  /**
   * Base class for server and client sockets. Contains a listening thread that will call
   * eventListener.onTCPMessage on new messages.
   */
  private abstract class TCPSocket extends Thread {
    // Lock for editing out and rawSocket
    protected final Object rawSocketLock;
    @Nullable
    private PrintWriter out;
    @Nullable
    private Socket rawSocket;

    /**
     * Connect to the peer, potentially a slow operation.
     *
     * @return Socket connection, null if connection failed.
     */
    @Nullable
    public abstract Socket connect();

    /** Returns true if sockets is a server rawSocket. */
    public abstract boolean isServer();

    TCPSocket() {
      rawSocketLock = new Object();
    }

    /**
     * The listening thread.
     */
    @Override
    public void run() {
      Log.d(TAG, "Listening thread started...");

      // Receive connection to temporary variable first, so we don't block.
      Socket tempSocket = connect();
      BufferedReader in;

      Log.d(TAG, "TCP connection established.");

      synchronized (rawSocketLock) {
        if (rawSocket != null) {
          Log.e(TAG, "Socket already existed and will be replaced.");
        }

        rawSocket = tempSocket;

        // Connecting failed, error has already been reported, just exit.
        if (rawSocket == null) {
          return;
        }

        // Both streams are created under the lock so that disconnect()
        // cannot observe a socket without its writer.
        try {
          out = new PrintWriter(
              new OutputStreamWriter(rawSocket.getOutputStream(), Charset.forName("UTF-8")), true);
          in = new BufferedReader(
              new InputStreamReader(rawSocket.getInputStream(), Charset.forName("UTF-8")));
        } catch (IOException e) {
          reportError("Failed to open IO on rawSocket: " + e.getMessage());
          return;
        }
      }

      Log.v(TAG, "Execute onTCPConnected");
      executor.execute(new Runnable() {
        @Override
        public void run() {
          Log.v(TAG, "Run onTCPConnected");
          eventListener.onTCPConnected(isServer());
        }
      });

      // Blocking read loop; `in` is only used on this thread so it is read
      // outside the lock.
      while (true) {
        final String message;
        try {
          message = in.readLine();
        } catch (IOException e) {
          synchronized (rawSocketLock) {
            // If socket was closed, this is expected.
            if (rawSocket == null) {
              break;
            }
          }

          reportError("Failed to read from rawSocket: " + e.getMessage());
          break;
        }

        // No data received, rawSocket probably closed.
        if (message == null) {
          break;
        }

        executor.execute(new Runnable() {
          @Override
          public void run() {
            Log.v(TAG, "Receive: " + message);
            eventListener.onTCPMessage(message);
          }
        });
      }

      Log.d(TAG, "Receiving thread exiting...");

      // Close the rawSocket if it is still open.
      disconnect();
    }

    /** Closes the rawSocket if it is still open. Also fires the onTCPClose event. */
    public void disconnect() {
      try {
        synchronized (rawSocketLock) {
          if (rawSocket != null) {
            rawSocket.close();
            // Nulling rawSocket tells the read loop that an IOException on
            // readLine() was an expected shutdown, not an error.
            rawSocket = null;
            out = null;

            executor.execute(new Runnable() {
              @Override
              public void run() {
                eventListener.onTCPClose();
              }
            });
          }
        }
      } catch (IOException e) {
        reportError("Failed to close rawSocket: " + e.getMessage());
      }
    }

    /**
     * Sends a message on the socket. Should only be called on the executor thread.
     */
    public void send(String message) {
      Log.v(TAG, "Send: " + message);

      synchronized (rawSocketLock) {
        if (out == null) {
          reportError("Sending data on closed socket.");
          return;
        }

        // Newline-delimited framing; the peer reads with readLine().
        out.write(message + "\n");
        out.flush();
      }
    }
  }

  private class TCPSocketServer extends TCPSocket {
    // Server socket is also guarded by rawSocketLock.
    @Nullable
    private ServerSocket serverSocket;

    final private InetAddress address;
    final private int port;

    public TCPSocketServer(InetAddress address, int port) {
      this.address = address;
      this.port = port;
    }

    /** Opens a listening socket and waits for a connection. */
    @Nullable
    @Override
    public Socket connect() {
      Log.d(TAG, "Listening on [" + address.getHostAddress() + "]:" + Integer.toString(port));

      final ServerSocket tempSocket;
      try {
        tempSocket = new ServerSocket(port, 0, address);
      } catch (IOException e) {
        reportError("Failed to create server socket: " + e.getMessage());
        return null;
      }

      synchronized (rawSocketLock) {
        if (serverSocket != null) {
          Log.e(TAG, "Server rawSocket was already listening and new will be opened.");
        }

        serverSocket = tempSocket;
      }

      // accept() blocks until a peer connects; disconnect() unblocks it by
      // closing serverSocket, which makes accept() throw.
      try {
        return tempSocket.accept();
      } catch (IOException e) {
        reportError("Failed to receive connection: " + e.getMessage());
        return null;
      }
    }

    /** Closes the listening socket and calls super. */
    @Override
    public void disconnect() {
      try {
        synchronized (rawSocketLock) {
          if (serverSocket != null) {
            serverSocket.close();
            serverSocket = null;
          }
        }
      } catch (IOException e) {
        reportError("Failed to close server socket: " + e.getMessage());
      }

      super.disconnect();
    }

    @Override
    public boolean isServer() {
      return true;
    }
  }

  private class TCPSocketClient extends TCPSocket {
    final private InetAddress address;
    final private int port;

    public TCPSocketClient(InetAddress address, int port) {
      this.address = address;
      this.port = port;
    }

    /** Connects to the peer. */
    @Nullable
    @Override
    public Socket connect() {
      Log.d(TAG, "Connecting to [" + address.getHostAddress() + "]:" + Integer.toString(port));

      try {
        return new Socket(address, port);
      } catch (IOException e) {
        reportError("Failed to connect: " + e.getMessage());
        return null;
      }
    }

    @Override
    public boolean isServer() {
      return false;
    }
  }
}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
new file mode 100644
index 0000000000..b256400119
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.util.Log;
+import android.util.TypedValue;
+import android.widget.ScrollView;
+import android.widget.TextView;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+/**
+ * Singleton helper: install a default unhandled exception handler which shows
+ * an informative dialog and kills the app. Useful for apps whose
+ * error-handling consists of throwing RuntimeExceptions.
+ * NOTE: almost always more useful to call
+ * Thread.setDefaultUncaughtExceptionHandler() rather than
+ * Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
+ */
+public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
+  private static final String TAG = "AppRTCMobileActivity";
+  // Activity that hosts the error dialog; all UI work is posted to its UI thread.
+  private final Activity activity;
+
+  public UnhandledExceptionHandler(final Activity activity) {
+    this.activity = activity;
+  }
+
+  // Shows a scrollable dialog with the full recursive stack trace of `e`;
+  // the single "Exit" button terminates the process with status 1.
+  @Override
+  public void uncaughtException(Thread unusedThread, final Throwable e) {
+    activity.runOnUiThread(new Runnable() {
+      @Override
+      public void run() {
+        String title = "Fatal error: " + getTopLevelCauseMessage(e);
+        String msg = getRecursiveStackTrace(e);
+        TextView errorView = new TextView(activity);
+        errorView.setText(msg);
+        // Small text so as much of the trace as possible fits on screen.
+        errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
+        ScrollView scrollingContainer = new ScrollView(activity);
+        scrollingContainer.addView(errorView);
+        // Also log the full trace in case the dialog is never seen.
+        Log.e(TAG, title + "\n\n" + msg);
+        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
+          @Override
+          public void onClick(DialogInterface dialog, int which) {
+            dialog.dismiss();
+            System.exit(1);
+          }
+        };
+        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+        builder.setTitle(title)
+            .setView(scrollingContainer)
+            .setPositiveButton("Exit", listener)
+            .show();
+      }
+    });
+  }
+
+  // Returns the Message attached to the original Cause of `t`.
+  private static String getTopLevelCauseMessage(Throwable t) {
+    Throwable topLevelCause = t;
+    while (topLevelCause.getCause() != null) {
+      topLevelCause = topLevelCause.getCause();
+    }
+    return topLevelCause.getMessage();
+  }
+
+  // Returns a human-readable String of the stacktrace in `t`, recursively
+  // through all Causes that led to `t`.
+  private static String getRecursiveStackTrace(Throwable t) {
+    StringWriter writer = new StringWriter();
+    t.printStackTrace(new PrintWriter(writer));
+    return writer.toString();
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
new file mode 100644
index 0000000000..5fa410889a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.Handler;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import de.tavendo.autobahn.WebSocket.WebSocketConnectionObserver;
+import de.tavendo.autobahn.WebSocketConnection;
+import de.tavendo.autobahn.WebSocketException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+import org.appspot.apprtc.util.AsyncHttpURLConnection;
+import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+/**
+ * WebSocket client implementation.
+ *
+ * <p>All public methods should be called from a looper executor thread
+ * passed in a constructor, otherwise exception will be thrown.
+ * All events are dispatched on the same thread.
+ */
+public class WebSocketChannelClient {
+  private static final String TAG = "WSChannelRTCClient";
+  // Milliseconds to wait in disconnect() for the close event from the library.
+  private static final int CLOSE_TIMEOUT = 1000;
+  private final WebSocketChannelEvents events;
+  // Looper-backed handler: all state access and event dispatch happens on its thread.
+  private final Handler handler;
+  private WebSocketConnection ws;
+  private String wsServerUrl;
+  private String postServerUrl;
+  @Nullable
+  private String roomID;
+  @Nullable
+  private String clientID;
+  private WebSocketConnectionState state;
+  // Do not remove this member variable. If this is removed, the observer gets garbage collected and
+  // this causes test breakages.
+  private WebSocketObserver wsObserver;
+  // Guards `closeEvent`, which signals that onClose() has fired.
+  private final Object closeEventLock = new Object();
+  private boolean closeEvent;
+  // WebSocket send queue. Messages are added to the queue when WebSocket
+  // client is not registered and are consumed in register() call.
+  private final List<String> wsSendQueue = new ArrayList<>();
+
+  /**
+   * Possible WebSocket connection states.
+   */
+  public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
+
+  /**
+   * Callback interface for messages delivered on WebSocket.
+   * All events are dispatched from a looper executor thread.
+   */
+  public interface WebSocketChannelEvents {
+    void onWebSocketMessage(final String message);
+    void onWebSocketClose();
+    void onWebSocketError(final String description);
+  }
+
+  public WebSocketChannelClient(Handler handler, WebSocketChannelEvents events) {
+    this.handler = handler;
+    this.events = events;
+    roomID = null;
+    clientID = null;
+    state = WebSocketConnectionState.NEW;
+  }
+
+  public WebSocketConnectionState getState() {
+    return state;
+  }
+
+  // Opens the WebSocket connection to `wsUrl`; `postUrl` is kept for the
+  // POST/DELETE signaling done in sendWSSMessage(). Only legal in state NEW.
+  public void connect(final String wsUrl, final String postUrl) {
+    checkIfCalledOnValidThread();
+    if (state != WebSocketConnectionState.NEW) {
+      Log.e(TAG, "WebSocket is already connected.");
+      return;
+    }
+    wsServerUrl = wsUrl;
+    postServerUrl = postUrl;
+    closeEvent = false;
+
+    Log.d(TAG, "Connecting WebSocket to: " + wsUrl + ". Post URL: " + postUrl);
+    ws = new WebSocketConnection();
+    wsObserver = new WebSocketObserver();
+    try {
+      ws.connect(new URI(wsServerUrl), wsObserver);
+    } catch (URISyntaxException e) {
+      reportError("URI error: " + e.getMessage());
+    } catch (WebSocketException e) {
+      reportError("WebSocket connection error: " + e.getMessage());
+    }
+  }
+
+  // Registers this client for roomID/clientID on the server and then flushes
+  // any queued messages. If the socket is not yet CONNECTED, the IDs are
+  // stored and registration is retried from onOpen().
+  public void register(final String roomID, final String clientID) {
+    checkIfCalledOnValidThread();
+    this.roomID = roomID;
+    this.clientID = clientID;
+    if (state != WebSocketConnectionState.CONNECTED) {
+      Log.w(TAG, "WebSocket register() in state " + state);
+      return;
+    }
+    Log.d(TAG, "Registering WebSocket for room " + roomID + ". ClientID: " + clientID);
+    JSONObject json = new JSONObject();
+    try {
+      json.put("cmd", "register");
+      json.put("roomid", roomID);
+      json.put("clientid", clientID);
+      Log.d(TAG, "C->WSS: " + json.toString());
+      ws.sendTextMessage(json.toString());
+      state = WebSocketConnectionState.REGISTERED;
+      // Send any previously accumulated messages.
+      for (String sendMessage : wsSendQueue) {
+        send(sendMessage);
+      }
+      wsSendQueue.clear();
+    } catch (JSONException e) {
+      reportError("WebSocket register JSON error: " + e.getMessage());
+    }
+  }
+
+  // Sends `message`: queued until REGISTERED, wrapped in a {"cmd":"send"}
+  // envelope once registered, dropped (with an error log) when ERROR/CLOSED.
+  public void send(String message) {
+    checkIfCalledOnValidThread();
+    switch (state) {
+      case NEW:
+      case CONNECTED:
+        // Store outgoing messages and send them after websocket client
+        // is registered.
+        Log.d(TAG, "WS ACC: " + message);
+        wsSendQueue.add(message);
+        return;
+      case ERROR:
+      case CLOSED:
+        Log.e(TAG, "WebSocket send() in error or closed state : " + message);
+        return;
+      case REGISTERED:
+        JSONObject json = new JSONObject();
+        try {
+          json.put("cmd", "send");
+          json.put("msg", message);
+          message = json.toString();
+          Log.d(TAG, "C->WSS: " + message);
+          ws.sendTextMessage(message);
+        } catch (JSONException e) {
+          reportError("WebSocket send JSON error: " + e.getMessage());
+        }
+        break;
+    }
+  }
+
+  // This call can be used to send WebSocket messages before WebSocket
+  // connection is opened.
+  public void post(String message) {
+    checkIfCalledOnValidThread();
+    sendWSSMessage("POST", message);
+  }
+
+  // Tears down the channel: sends "bye" + HTTP DELETE if registered, then
+  // closes the socket. When `waitForComplete`, blocks (bounded by
+  // CLOSE_TIMEOUT) until the library delivers its close event.
+  public void disconnect(boolean waitForComplete) {
+    checkIfCalledOnValidThread();
+    Log.d(TAG, "Disconnect WebSocket. State: " + state);
+    if (state == WebSocketConnectionState.REGISTERED) {
+      // Send "bye" to WebSocket server.
+      send("{\"type\": \"bye\"}");
+      state = WebSocketConnectionState.CONNECTED;
+      // Send http DELETE to http WebSocket server.
+      sendWSSMessage("DELETE", "");
+    }
+    // Close WebSocket in CONNECTED or ERROR states only.
+    if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
+      ws.disconnect();
+      state = WebSocketConnectionState.CLOSED;
+
+      // Wait for websocket close event to prevent websocket library from
+      // sending any pending messages to deleted looper thread.
+      if (waitForComplete) {
+        synchronized (closeEventLock) {
+          while (!closeEvent) {
+            try {
+              closeEventLock.wait(CLOSE_TIMEOUT);
+              // NOTE(review): the unconditional break bounds the wait to a single
+              // CLOSE_TIMEOUT period even on a spurious wakeup - looks intentional.
+              break;
+            } catch (InterruptedException e) {
+              Log.e(TAG, "Wait error: " + e.toString());
+            }
+          }
+        }
+      }
+    }
+    Log.d(TAG, "Disconnecting WebSocket done.");
+  }
+
+  // Logs and transitions to ERROR (once), then notifies `events` on the looper thread.
+  private void reportError(final String errorMessage) {
+    Log.e(TAG, errorMessage);
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        if (state != WebSocketConnectionState.ERROR) {
+          state = WebSocketConnectionState.ERROR;
+          events.onWebSocketError(errorMessage);
+        }
+      }
+    });
+  }
+
+  // Asynchronously send POST/DELETE to WebSocket server.
+  private void sendWSSMessage(final String method, final String message) {
+    String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
+    Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
+          @Override
+          public void onHttpError(String errorMessage) {
+            reportError("WS " + method + " error: " + errorMessage);
+          }
+
+          @Override
+          public void onHttpComplete(String response) {}
+        });
+    httpConnection.send();
+  }
+
+  // Helper method for debugging purposes. Ensures that WebSocket method is
+  // called on a looper thread.
+  private void checkIfCalledOnValidThread() {
+    if (Thread.currentThread() != handler.getLooper().getThread()) {
+      throw new IllegalStateException("WebSocket method is not called on valid thread");
+    }
+  }
+
+  // Bridges autobahn library callbacks (arbitrary thread) onto the looper thread.
+  private class WebSocketObserver implements WebSocketConnectionObserver {
+    @Override
+    public void onOpen() {
+      Log.d(TAG, "WebSocket connection opened to: " + wsServerUrl);
+      handler.post(new Runnable() {
+        @Override
+        public void run() {
+          state = WebSocketConnectionState.CONNECTED;
+          // Check if we have pending register request.
+          if (roomID != null && clientID != null) {
+            register(roomID, clientID);
+          }
+        }
+      });
+    }
+
+    @Override
+    public void onClose(WebSocketCloseNotification code, String reason) {
+      Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+          + state);
+      // Wake up a disconnect(true) caller blocked on the close event.
+      synchronized (closeEventLock) {
+        closeEvent = true;
+        closeEventLock.notify();
+      }
+      handler.post(new Runnable() {
+        @Override
+        public void run() {
+          if (state != WebSocketConnectionState.CLOSED) {
+            state = WebSocketConnectionState.CLOSED;
+            events.onWebSocketClose();
+          }
+        }
+      });
+    }
+
+    @Override
+    public void onTextMessage(String payload) {
+      Log.d(TAG, "WSS->C: " + payload);
+      final String message = payload;
+      handler.post(new Runnable() {
+        @Override
+        public void run() {
+          // Only deliver while the channel is live; drop after close/error.
+          if (state == WebSocketConnectionState.CONNECTED
+              || state == WebSocketConnectionState.REGISTERED) {
+            events.onWebSocketMessage(message);
+          }
+        }
+      });
+    }
+
+    @Override
+    public void onRawTextMessage(byte[] payload) {}
+
+    @Override
+    public void onBinaryMessage(byte[] payload) {}
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
new file mode 100644
index 0000000000..cbfdb21c91
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
@@ -0,0 +1,427 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import org.appspot.apprtc.RoomParametersFetcher.RoomParametersFetcherEvents;
+import org.appspot.apprtc.WebSocketChannelClient.WebSocketChannelEvents;
+import org.appspot.apprtc.WebSocketChannelClient.WebSocketConnectionState;
+import org.appspot.apprtc.util.AsyncHttpURLConnection;
+import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.SessionDescription;
+
+/**
+ * Negotiates signaling for chatting with https://appr.tc "rooms".
+ * Uses the client<->server specifics of the apprtc AppEngine webapp.
+ *
+ * <p>To use: create an instance of this object (registering a message handler) and
+ * call connectToRoom(). Once room connection is established
+ * onConnectedToRoom() callback with room parameters is invoked.
+ * Messages to other party (with local Ice candidates and answer SDP) can
+ * be sent after WebSocket connection is established.
+ */
+public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
+  private static final String TAG = "WSRTCClient";
+  // URL path segments of the apprtc room server API.
+  private static final String ROOM_JOIN = "join";
+  private static final String ROOM_MESSAGE = "message";
+  private static final String ROOM_LEAVE = "leave";
+
+  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+  private enum MessageType { MESSAGE, LEAVE }
+
+  // All signaling work is serialized on this dedicated HandlerThread's looper.
+  private final Handler handler;
+  // True when this client is the call initiator, per the room server response.
+  private boolean initiator;
+  private SignalingEvents events;
+  private WebSocketChannelClient wsClient;
+  private ConnectionState roomState;
+  private RoomConnectionParameters connectionParameters;
+  // GAE endpoints derived from room parameters in signalingParametersReady().
+  private String messageUrl;
+  private String leaveUrl;
+
+  public WebSocketRTCClient(SignalingEvents events) {
+    this.events = events;
+    roomState = ConnectionState.NEW;
+    final HandlerThread handlerThread = new HandlerThread(TAG);
+    handlerThread.start();
+    handler = new Handler(handlerThread.getLooper());
+  }
+
+  // --------------------------------------------------------------------
+  // AppRTCClient interface implementation.
+  // Asynchronously connect to an AppRTC room URL using supplied connection
+  // parameters, retrieves room parameters and connect to WebSocket server.
+  @Override
+  public void connectToRoom(RoomConnectionParameters connectionParameters) {
+    this.connectionParameters = connectionParameters;
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        connectToRoomInternal();
+      }
+    });
+  }
+
+  @Override
+  public void disconnectFromRoom() {
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        disconnectFromRoomInternal();
+        // Tear down the signaling thread; the client is single-use after this.
+        handler.getLooper().quit();
+      }
+    });
+  }
+
+  // Connects to room - function runs on a local looper thread.
+  private void connectToRoomInternal() {
+    String connectionUrl = getConnectionUrl(connectionParameters);
+    Log.d(TAG, "Connect to room: " + connectionUrl);
+    roomState = ConnectionState.NEW;
+    wsClient = new WebSocketChannelClient(handler, this);
+
+    RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
+      @Override
+      public void onSignalingParametersReady(final SignalingParameters params) {
+        // Fetcher callbacks arrive on its own thread; hop back to the looper.
+        WebSocketRTCClient.this.handler.post(new Runnable() {
+          @Override
+          public void run() {
+            WebSocketRTCClient.this.signalingParametersReady(params);
+          }
+        });
+      }
+
+      @Override
+      public void onSignalingParametersError(String description) {
+        WebSocketRTCClient.this.reportError(description);
+      }
+    };
+
+    new RoomParametersFetcher(connectionUrl, null, callbacks).makeRequest();
+  }
+
+  // Disconnect from room and send bye messages - runs on a local looper thread.
+  private void disconnectFromRoomInternal() {
+    Log.d(TAG, "Disconnect. Room state: " + roomState);
+    if (roomState == ConnectionState.CONNECTED) {
+      Log.d(TAG, "Closing room.");
+      sendPostMessage(MessageType.LEAVE, leaveUrl, null);
+    }
+    roomState = ConnectionState.CLOSED;
+    if (wsClient != null) {
+      wsClient.disconnect(true);
+    }
+  }
+
+  // Helper functions to get connection, post message and leave message URLs
+  private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId
+        + getQueryString(connectionParameters);
+  }
+
+  private String getMessageUrl(
+      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+        + "/" + signalingParameters.clientId + getQueryString(connectionParameters);
+  }
+
+  private String getLeaveUrl(
+      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+        + signalingParameters.clientId + getQueryString(connectionParameters);
+  }
+
+  private String getQueryString(RoomConnectionParameters connectionParameters) {
+    if (connectionParameters.urlParameters != null) {
+      return "?" + connectionParameters.urlParameters;
+    } else {
+      return "";
+    }
+  }
+
+  // Callback issued when room parameters are extracted. Runs on local
+  // looper thread.
+  private void signalingParametersReady(final SignalingParameters signalingParameters) {
+    Log.d(TAG, "Room connection completed.");
+    if (connectionParameters.loopback
+        && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
+      reportError("Loopback room is busy.");
+      return;
+    }
+    if (!connectionParameters.loopback && !signalingParameters.initiator
+        && signalingParameters.offerSdp == null) {
+      Log.w(TAG, "No offer SDP in room response.");
+    }
+    initiator = signalingParameters.initiator;
+    messageUrl = getMessageUrl(connectionParameters, signalingParameters);
+    leaveUrl = getLeaveUrl(connectionParameters, signalingParameters);
+    Log.d(TAG, "Message URL: " + messageUrl);
+    Log.d(TAG, "Leave URL: " + leaveUrl);
+    roomState = ConnectionState.CONNECTED;
+
+    // Fire connection and signaling parameters events.
+    events.onConnectedToRoom(signalingParameters);
+
+    // Connect and register WebSocket client.
+    wsClient.connect(signalingParameters.wssUrl, signalingParameters.wssPostUrl);
+    wsClient.register(connectionParameters.roomId, signalingParameters.clientId);
+  }
+
+  // Send local offer SDP to the other participant.
+  @Override
+  public void sendOfferSdp(final SessionDescription sdp) {
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        if (roomState != ConnectionState.CONNECTED) {
+          reportError("Sending offer SDP in non connected state.");
+          return;
+        }
+        JSONObject json = new JSONObject();
+        jsonPut(json, "sdp", sdp.description);
+        jsonPut(json, "type", "offer");
+        // Offers always travel via the GAE room server, not the WebSocket.
+        sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+        if (connectionParameters.loopback) {
+          // In loopback mode rename this offer to answer and route it back.
+          SessionDescription sdpAnswer = new SessionDescription(
+              SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
+          events.onRemoteDescription(sdpAnswer);
+        }
+      }
+    });
+  }
+
+  // Send local answer SDP to the other participant.
+  @Override
+  public void sendAnswerSdp(final SessionDescription sdp) {
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        if (connectionParameters.loopback) {
+          Log.e(TAG, "Sending answer in loopback mode.");
+          return;
+        }
+        JSONObject json = new JSONObject();
+        jsonPut(json, "sdp", sdp.description);
+        jsonPut(json, "type", "answer");
+        // Answers travel over the WebSocket channel.
+        wsClient.send(json.toString());
+      }
+    });
+  }
+
+  // Send Ice candidate to the other participant.
+  @Override
+  public void sendLocalIceCandidate(final IceCandidate candidate) {
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        JSONObject json = new JSONObject();
+        jsonPut(json, "type", "candidate");
+        jsonPut(json, "label", candidate.sdpMLineIndex);
+        jsonPut(json, "id", candidate.sdpMid);
+        jsonPut(json, "candidate", candidate.sdp);
+        if (initiator) {
+          // Call initiator sends ice candidates to GAE server.
+          if (roomState != ConnectionState.CONNECTED) {
+            reportError("Sending ICE candidate in non connected state.");
+            return;
+          }
+          sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+          if (connectionParameters.loopback) {
+            events.onRemoteIceCandidate(candidate);
+          }
+        } else {
+          // Call receiver sends ice candidates to websocket server.
+          wsClient.send(json.toString());
+        }
+      }
+    });
+  }
+
+  // Send removed Ice candidates to the other participant.
+  @Override
+  public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        JSONObject json = new JSONObject();
+        jsonPut(json, "type", "remove-candidates");
+        JSONArray jsonArray = new JSONArray();
+        for (final IceCandidate candidate : candidates) {
+          jsonArray.put(toJsonCandidate(candidate));
+        }
+        jsonPut(json, "candidates", jsonArray);
+        if (initiator) {
+          // Call initiator sends ice candidates to GAE server.
+          if (roomState != ConnectionState.CONNECTED) {
+            reportError("Sending ICE candidate removals in non connected state.");
+            return;
+          }
+          sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+          if (connectionParameters.loopback) {
+            events.onRemoteIceCandidatesRemoved(candidates);
+          }
+        } else {
+          // Call receiver sends ice candidates to websocket server.
+          wsClient.send(json.toString());
+        }
+      }
+    });
+  }
+
+  // --------------------------------------------------------------------
+  // WebSocketChannelEvents interface implementation.
+  // All events are called by WebSocketChannelClient on a local looper thread
+  // (passed to WebSocket client constructor).
+  @Override
+  public void onWebSocketMessage(final String msg) {
+    if (wsClient.getState() != WebSocketConnectionState.REGISTERED) {
+      Log.e(TAG, "Got WebSocket message in non registered state.");
+      return;
+    }
+    try {
+      // Server wraps the peer's message in {"msg": ..., "error": ...}.
+      JSONObject json = new JSONObject(msg);
+      String msgText = json.getString("msg");
+      String errorText = json.optString("error");
+      if (msgText.length() > 0) {
+        json = new JSONObject(msgText);
+        String type = json.optString("type");
+        if (type.equals("candidate")) {
+          events.onRemoteIceCandidate(toJavaCandidate(json));
+        } else if (type.equals("remove-candidates")) {
+          JSONArray candidateArray = json.getJSONArray("candidates");
+          IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
+          for (int i = 0; i < candidateArray.length(); ++i) {
+            candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
+          }
+          events.onRemoteIceCandidatesRemoved(candidates);
+        } else if (type.equals("answer")) {
+          if (initiator) {
+            SessionDescription sdp = new SessionDescription(
+                SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+            events.onRemoteDescription(sdp);
+          } else {
+            reportError("Received answer for call initiator: " + msg);
+          }
+        } else if (type.equals("offer")) {
+          if (!initiator) {
+            SessionDescription sdp = new SessionDescription(
+                SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+            events.onRemoteDescription(sdp);
+          } else {
+            reportError("Received offer for call receiver: " + msg);
+          }
+        } else if (type.equals("bye")) {
+          events.onChannelClose();
+        } else {
+          reportError("Unexpected WebSocket message: " + msg);
+        }
+      } else {
+        if (errorText != null && errorText.length() > 0) {
+          reportError("WebSocket error message: " + errorText);
+        } else {
+          reportError("Unexpected WebSocket message: " + msg);
+        }
+      }
+    } catch (JSONException e) {
+      reportError("WebSocket message JSON parsing error: " + e.toString());
+    }
+  }
+
+  @Override
+  public void onWebSocketClose() {
+    events.onChannelClose();
+  }
+
+  @Override
+  public void onWebSocketError(String description) {
+    reportError("WebSocket error: " + description);
+  }
+
+  // --------------------------------------------------------------------
+  // Helper functions.
+  // Logs and transitions to ERROR (once), notifying `events` on the looper thread.
+  private void reportError(final String errorMessage) {
+    Log.e(TAG, errorMessage);
+    handler.post(new Runnable() {
+      @Override
+      public void run() {
+        if (roomState != ConnectionState.ERROR) {
+          roomState = ConnectionState.ERROR;
+          events.onChannelError(errorMessage);
+        }
+      }
+    });
+  }
+
+  // Put a `key`->`value` mapping in `json`.
+  private static void jsonPut(JSONObject json, String key, Object value) {
+    try {
+      json.put(key, value);
+    } catch (JSONException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  // Send SDP or ICE candidate to a room server.
+  private void sendPostMessage(
+      final MessageType messageType, final String url, @Nullable final String message) {
+    String logInfo = url;
+    if (message != null) {
+      logInfo += ". Message: " + message;
+    }
+    Log.d(TAG, "C->GAE: " + logInfo);
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
+          @Override
+          public void onHttpError(String errorMessage) {
+            reportError("GAE POST error: " + errorMessage);
+          }
+
+          @Override
+          public void onHttpComplete(String response) {
+            // Only MESSAGE posts carry a {"result": ...} body to validate.
+            if (messageType == MessageType.MESSAGE) {
+              try {
+                JSONObject roomJson = new JSONObject(response);
+                String result = roomJson.getString("result");
+                if (!result.equals("SUCCESS")) {
+                  reportError("GAE POST error: " + result);
+                }
+              } catch (JSONException e) {
+                reportError("GAE POST JSON error: " + e.toString());
+              }
+            }
+          }
+        });
+    httpConnection.send();
+  }
+
+  // Converts a Java candidate to a JSONObject.
+  private JSONObject toJsonCandidate(final IceCandidate candidate) {
+    JSONObject json = new JSONObject();
+    jsonPut(json, "label", candidate.sdpMLineIndex);
+    jsonPut(json, "id", candidate.sdpMid);
+    jsonPut(json, "candidate", candidate.sdp);
+    return json;
+  }
+
+  // Converts a JSON candidate to a Java object.
+  IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
+    return new IceCandidate(
+        json.getString("id"), json.getInt("label"), json.getString("candidate"));
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
new file mode 100644
index 0000000000..ee7f8c0416
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc.util;
+
+import android.os.Build;
+import android.util.Log;
+
+/**
+ * AppRTCUtils provides helper functions for managing thread safety.
+ */
+public final class AppRTCUtils {
+  // Static-only utility class; never instantiated.
+  private AppRTCUtils() {}
+
+  /** Helper method which throws an exception when an assertion has failed. */
+  public static void assertIsTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  /** Helper method for building a string of thread information.*/
+  public static String getThreadInfo() {
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
+  }
+
+  /** Information about the current build, taken from system properties. */
+  public static void logDeviceInfo(String tag) {
+    Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+            + "Release: " + Build.VERSION.RELEASE + ", "
+            + "Brand: " + Build.BRAND + ", "
+            + "Device: " + Build.DEVICE + ", "
+            + "Id: " + Build.ID + ", "
+            + "Hardware: " + Build.HARDWARE + ", "
+            + "Manufacturer: " + Build.MANUFACTURER + ", "
+            + "Model: " + Build.MODEL + ", "
+            + "Product: " + Build.PRODUCT);
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
new file mode 100644
index 0000000000..93028ae783
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.SocketTimeoutException;
+import java.net.URL;
+import java.util.Scanner;
+
+/**
+ * Asynchronous http requests implementation.
+ */
+public class AsyncHttpURLConnection {
+  // Connect and read timeout for every request, in milliseconds.
+  private static final int HTTP_TIMEOUT_MS = 8000;
+  private static final String HTTP_ORIGIN = "https://appr.tc";
+  private final String method;
+  private final String url;
+  // Request body; may be null for bodiless requests (e.g. DELETE).
+  private final String message;
+  private final AsyncHttpEvents events;
+  // Optional Content-Type header; defaults to text/plain when left unset.
+  private String contentType;
+
+  /**
+   * Http requests callbacks.
+   */
+  public interface AsyncHttpEvents {
+    void onHttpError(String errorMessage);
+    void onHttpComplete(String response);
+  }
+
+  public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
+    this.method = method;
+    this.url = url;
+    this.message = message;
+    this.events = events;
+  }
+
+  public void setContentType(String contentType) {
+    this.contentType = contentType;
+  }
+
+  // Executes the request on a freshly spawned thread; `events` callbacks
+  // fire on that thread, not the caller's.
+  public void send() {
+    new Thread(this ::sendHttpMessage).start();
+  }
+
+  @SuppressWarnings("UseNetworkAnnotations")
+  private void sendHttpMessage() {
+    try {
+      HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
+      byte[] postData = new byte[0];
+      if (message != null) {
+        postData = message.getBytes("UTF-8");
+      }
+      connection.setRequestMethod(method);
+      connection.setUseCaches(false);
+      connection.setDoInput(true);
+      connection.setConnectTimeout(HTTP_TIMEOUT_MS);
+      connection.setReadTimeout(HTTP_TIMEOUT_MS);
+      // TODO(glaznev) - query request origin from pref_room_server_url_key preferences.
+      connection.addRequestProperty("origin", HTTP_ORIGIN);
+      boolean doOutput = false;
+      if (method.equals("POST")) {
+        doOutput = true;
+        connection.setDoOutput(true);
+        connection.setFixedLengthStreamingMode(postData.length);
+      }
+      if (contentType == null) {
+        connection.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
+      } else {
+        connection.setRequestProperty("Content-Type", contentType);
+      }
+
+      // Send POST request.
+      if (doOutput && postData.length > 0) {
+        OutputStream outStream = connection.getOutputStream();
+        outStream.write(postData);
+        outStream.close();
+      }
+
+      // Get response.
+      int responseCode = connection.getResponseCode();
+      if (responseCode != 200) {
+        events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+            + connection.getHeaderField(null));
+        connection.disconnect();
+        return;
+      }
+      InputStream responseStream = connection.getInputStream();
+      String response = drainStream(responseStream);
+      responseStream.close();
+      connection.disconnect();
+      events.onHttpComplete(response);
+    } catch (SocketTimeoutException e) {
+      events.onHttpError("HTTP " + method + " to " + url + " timeout");
+    } catch (IOException e) {
+      events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
+    }
+  }
+
+  // Return the contents of an InputStream as a String.
+  // The "\\A" delimiter makes the Scanner consume the entire stream in one token.
+  private static String drainStream(InputStream in) {
+    Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
+    return s.hasNext() ? s.next() : "";
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py b/third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py
new file mode 100644
index 0000000000..b1cf84611f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+from optparse import OptionParser
+import random
+import string
+import subprocess
+import sys
+import time
+
+from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
+
+
+def main():
+ parser = OptionParser()
+
+ parser.add_option('--devname', dest='devname', help='The device id')
+
+ parser.add_option(
+ '--videooutsave',
+ dest='videooutsave',
+ help='The path where to save the video out file on local computer')
+
+ parser.add_option('--videoout',
+ dest='videoout',
+ help='The path where to put the video out file')
+
+ parser.add_option('--videoout_width',
+ dest='videoout_width',
+ type='int',
+ help='The width for the video out file')
+
+ parser.add_option('--videoout_height',
+ dest='videoout_height',
+ type='int',
+ help='The height for the video out file')
+
+ parser.add_option(
+ '--videoin',
+ dest='videoin',
+ help='The path where to read input file instead of camera')
+
+ parser.add_option('--call_length',
+ dest='call_length',
+ type='int',
+ help='The length of the call')
+
+ (options, args) = parser.parse_args()
+
+ print(options, args)
+
+ devname = options.devname
+
+ videoin = options.videoin
+
+ videoout = options.videoout
+ videoout_width = options.videoout_width
+ videoout_height = options.videoout_height
+
+ videooutsave = options.videooutsave
+
+ call_length = options.call_length or 10
+
+ room = ''.join(
+ random.choice(string.ascii_letters + string.digits) for _ in range(8))
+
+ # Delete output video file.
+ if videoout:
+ subprocess.check_call(
+ ['adb', '-s', devname, 'shell', 'rm', '-f', videoout])
+
+ device = MonkeyRunner.waitForConnection(2, devname)
+
+ extras = {
+ 'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True,
+ 'org.appspot.apprtc.AUDIOCODEC': 'OPUS',
+ 'org.appspot.apprtc.LOOPBACK': True,
+ 'org.appspot.apprtc.VIDEOCODEC': 'VP8',
+ 'org.appspot.apprtc.CAPTURETOTEXTURE': False,
+ 'org.appspot.apprtc.CAMERA2': False,
+ 'org.appspot.apprtc.ROOMID': room
+ }
+
+ if videoin:
+ extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin})
+
+ if videoout:
+ extras.update({
+ 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE':
+ videoout,
+ 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH':
+ videoout_width,
+ 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT':
+ videoout_height
+ })
+
+ print extras
+
+ device.startActivity(data='https://appr.tc',
+ action='android.intent.action.VIEW',
+ component='org.appspot.apprtc/.ConnectActivity',
+ extras=extras)
+
+ print 'Running a call for %d seconds' % call_length
+ for _ in xrange(call_length):
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ time.sleep(1)
+ print '\nEnding call.'
+
+ # Press back to end the call. Will end on both sides.
+ device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)
+
+ if videooutsave:
+ time.sleep(2)
+
+ subprocess.check_call(
+ ['adb', '-s', devname, 'pull', videoout, videooutsave])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn
new file mode 100644
index 0000000000..b671239bae
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+if (is_android) {
+ import("//build/config/android/rules.gni")
+
+ android_java_prebuilt("autobanh_java") {
+ jar_path = "lib/autobanh.jar"
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE
new file mode 100644
index 0000000000..f433b1a53f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md
new file mode 100644
index 0000000000..2079e90d6b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/LICENSE.md
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Cameron Lowell Palmer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE
new file mode 100644
index 0000000000..91ed7dfe0e
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/NOTICE
@@ -0,0 +1,3 @@
+AutobahnAndroid
+Copyright 2011,2012 Tavendo GmbH. Licensed under Apache 2.0
+This product includes software developed at Tavendo GmbH http://www.tavendo.de
diff --git a/third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar
new file mode 100644
index 0000000000..5a10b7f3f1
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidapp/third_party/autobanh/lib/autobanh.jar
Binary files differ
diff --git a/third_party/libwebrtc/examples/androidjunit/OWNERS b/third_party/libwebrtc/examples/androidjunit/OWNERS
new file mode 100644
index 0000000000..cf092a316a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidjunit/OWNERS
@@ -0,0 +1 @@
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/androidjunit/README b/third_party/libwebrtc/examples/androidjunit/README
new file mode 100644
index 0000000000..03902a779c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidjunit/README
@@ -0,0 +1,8 @@
+This directory contains example JUnit tests for Android AppRTCMobile.
+Many of these test utilize Robolectric to mock Android classes.
+
+To compile:
+ninja -C out/Debug android_examples_junit_tests
+
+To run:
+out/Debug/bin/run_android_examples_junit_tests
diff --git a/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java b/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java
new file mode 100644
index 0000000000..d7c190518c
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java
@@ -0,0 +1,268 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothHeadset;
+import android.bluetooth.BluetoothProfile;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.media.AudioManager;
+import android.util.Log;
+import androidx.test.core.app.ApplicationProvider;
+import java.util.ArrayList;
+import java.util.List;
+import org.appspot.apprtc.AppRTCBluetoothManager.State;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.robolectric.shadows.ShadowLog;
+import org.robolectric.RobolectricTestRunner;
+
+/**
+ * Verifies basic behavior of the AppRTCBluetoothManager class.
+ * Note that the test object uses an AppRTCAudioManager (injected in ctor),
+ * but a mocked version is used instead. Hence, the parts "driven" by the AppRTC
+ * audio manager are not included in this test.
+ */
+@RunWith(RobolectricTestRunner.class)
+@Config(manifest = Config.NONE)
+public class BluetoothManagerTest {
+ private static final String TAG = "BluetoothManagerTest";
+ private static final String BLUETOOTH_TEST_DEVICE_NAME = "BluetoothTestDevice";
+
+ private BroadcastReceiver bluetoothHeadsetStateReceiver;
+ private BluetoothProfile.ServiceListener bluetoothServiceListener;
+ private BluetoothHeadset mockedBluetoothHeadset;
+ private BluetoothDevice mockedBluetoothDevice;
+ private List<BluetoothDevice> mockedBluetoothDeviceList;
+ private AppRTCBluetoothManager bluetoothManager;
+ private AppRTCAudioManager mockedAppRtcAudioManager;
+ private AudioManager mockedAudioManager;
+ private Context context;
+
+ @Before
+ public void setUp() {
+ ShadowLog.stream = System.out;
+ context = ApplicationProvider.getApplicationContext();
+ mockedAppRtcAudioManager = mock(AppRTCAudioManager.class);
+ mockedAudioManager = mock(AudioManager.class);
+ mockedBluetoothHeadset = mock(BluetoothHeadset.class);
+ mockedBluetoothDevice = mock(BluetoothDevice.class);
+ mockedBluetoothDeviceList = new ArrayList<BluetoothDevice>();
+
+ // Simulate that bluetooth SCO audio is available by default.
+ when(mockedAudioManager.isBluetoothScoAvailableOffCall()).thenReturn(true);
+
+ // Create the test object and override protected methods for this test.
+ bluetoothManager = new AppRTCBluetoothManager(context, mockedAppRtcAudioManager) {
+ @Override
+ protected AudioManager getAudioManager(Context context) {
+ Log.d(TAG, "getAudioManager");
+ return mockedAudioManager;
+ }
+
+ @Override
+ protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
+ Log.d(TAG, "registerReceiver");
+ if (filter.hasAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)
+ && filter.hasAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
+ // Gives access to the real broadcast receiver so the test can use it.
+ bluetoothHeadsetStateReceiver = receiver;
+ }
+ }
+
+ @Override
+ protected void unregisterReceiver(BroadcastReceiver receiver) {
+ Log.d(TAG, "unregisterReceiver");
+ if (receiver == bluetoothHeadsetStateReceiver) {
+ bluetoothHeadsetStateReceiver = null;
+ }
+ }
+
+ @Override
+ protected boolean getBluetoothProfileProxy(
+ Context context, BluetoothProfile.ServiceListener listener, int profile) {
+ Log.d(TAG, "getBluetoothProfileProxy");
+ if (profile == BluetoothProfile.HEADSET) {
+ // Allows the test to access the real Bluetooth service listener object.
+ bluetoothServiceListener = listener;
+ }
+ return true;
+ }
+
+ @Override
+ protected boolean hasPermission(Context context, String permission) {
+ Log.d(TAG, "hasPermission(" + permission + ")");
+ // Ensure that the client asks for Bluetooth permission.
+ return android.Manifest.permission.BLUETOOTH.equals(permission);
+ }
+
+ @Override
+ protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
+ // Do nothing in tests. No need to mock BluetoothAdapter.
+ }
+ };
+ }
+
+ // Verify that Bluetooth service listener for headset profile is properly initialized.
+ @Test
+ public void testBluetoothServiceListenerInitialized() {
+ bluetoothManager.start();
+ assertNotNull(bluetoothServiceListener);
+ verify(mockedAppRtcAudioManager, never()).updateAudioDeviceState();
+ }
+
+ // Verify that broadcast receivers for Bluetooth SCO audio state and Bluetooth headset state
+ // are properly registered and unregistered.
+ @Test
+ public void testBluetoothBroadcastReceiversAreRegistered() {
+ bluetoothManager.start();
+ assertNotNull(bluetoothHeadsetStateReceiver);
+ bluetoothManager.stop();
+ assertNull(bluetoothHeadsetStateReceiver);
+ }
+
+ // Verify that the Bluetooth manager starts and stops with correct states.
+ @Test
+ public void testBluetoothDefaultStartStopStates() {
+ bluetoothManager.start();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ bluetoothManager.stop();
+ assertEquals(bluetoothManager.getState(), State.UNINITIALIZED);
+ }
+
+ // Verify correct state after receiving BluetoothServiceListener.onServiceConnected()
+ // when no BT device is enabled.
+ @Test
+ public void testBluetoothServiceListenerConnectedWithNoHeadset() {
+ bluetoothManager.start();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ simulateBluetoothServiceConnectedWithNoConnectedHeadset();
+ verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ }
+
+ // Verify correct state after receiving BluetoothServiceListener.onServiceConnected()
+ // when one emulated (test) BT device is enabled. Android does not support more than
+ // one connected BT headset.
+ @Test
+ public void testBluetoothServiceListenerConnectedWithHeadset() {
+ bluetoothManager.start();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ simulateBluetoothServiceConnectedWithConnectedHeadset();
+ verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE);
+ }
+
+ // Verify correct state after receiving BluetoothProfile.ServiceListener.onServiceDisconnected().
+ @Test
+ public void testBluetoothServiceListenerDisconnected() {
+ bluetoothManager.start();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ simulateBluetoothServiceDisconnected();
+ verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ }
+
+ // Verify correct state after BluetoothServiceListener.onServiceConnected() and
+ // the intent indicating that the headset is actually connected. Both these callbacks
+ // results in calls to updateAudioDeviceState() on the AppRTC audio manager.
+ // No BT SCO is enabled here to keep the test limited.
+ @Test
+ public void testBluetoothHeadsetConnected() {
+ bluetoothManager.start();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ simulateBluetoothServiceConnectedWithConnectedHeadset();
+ simulateBluetoothHeadsetConnected();
+ verify(mockedAppRtcAudioManager, times(2)).updateAudioDeviceState();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE);
+ }
+
+ // Verify correct state sequence for a case when a BT headset is available,
+ // followed by BT SCO audio being enabled and then stopped.
+ @Test
+ public void testBluetoothScoAudioStartAndStop() {
+ bluetoothManager.start();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE);
+ simulateBluetoothServiceConnectedWithConnectedHeadset();
+ assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE);
+ bluetoothManager.startScoAudio();
+ assertEquals(bluetoothManager.getState(), State.SCO_CONNECTING);
+ simulateBluetoothScoConnectionConnected();
+ assertEquals(bluetoothManager.getState(), State.SCO_CONNECTED);
+ bluetoothManager.stopScoAudio();
+ simulateBluetoothScoConnectionDisconnected();
+ assertEquals(bluetoothManager.getState(), State.SCO_DISCONNECTING);
+ bluetoothManager.stop();
+ assertEquals(bluetoothManager.getState(), State.UNINITIALIZED);
+ verify(mockedAppRtcAudioManager, times(3)).updateAudioDeviceState();
+ }
+
+ /**
+ * Private helper methods.
+ */
+ private void simulateBluetoothServiceConnectedWithNoConnectedHeadset() {
+ mockedBluetoothDeviceList.clear();
+ when(mockedBluetoothHeadset.getConnectedDevices()).thenReturn(mockedBluetoothDeviceList);
+ bluetoothServiceListener.onServiceConnected(BluetoothProfile.HEADSET, mockedBluetoothHeadset);
+ // In real life, the AppRTC audio manager makes this call.
+ bluetoothManager.updateDevice();
+ }
+
+ private void simulateBluetoothServiceConnectedWithConnectedHeadset() {
+ mockedBluetoothDeviceList.clear();
+ mockedBluetoothDeviceList.add(mockedBluetoothDevice);
+ when(mockedBluetoothHeadset.getConnectedDevices()).thenReturn(mockedBluetoothDeviceList);
+ when(mockedBluetoothDevice.getName()).thenReturn(BLUETOOTH_TEST_DEVICE_NAME);
+ bluetoothServiceListener.onServiceConnected(BluetoothProfile.HEADSET, mockedBluetoothHeadset);
+ // In real life, the AppRTC audio manager makes this call.
+ bluetoothManager.updateDevice();
+ }
+
+ private void simulateBluetoothServiceDisconnected() {
+ bluetoothServiceListener.onServiceDisconnected(BluetoothProfile.HEADSET);
+ }
+
+ private void simulateBluetoothHeadsetConnected() {
+ Intent intent = new Intent();
+ intent.setAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
+ intent.putExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_CONNECTED);
+ bluetoothHeadsetStateReceiver.onReceive(context, intent);
+ }
+
+ private void simulateBluetoothScoConnectionConnected() {
+ Intent intent = new Intent();
+ intent.setAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
+ intent.putExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_CONNECTED);
+ bluetoothHeadsetStateReceiver.onReceive(context, intent);
+ }
+
+ private void simulateBluetoothScoConnectionDisconnected() {
+ Intent intent = new Intent();
+ intent.setAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
+ intent.putExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
+ bluetoothHeadsetStateReceiver.onReceive(context, intent);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java b/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java
new file mode 100644
index 0000000000..1ee0e41390
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.isNotNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.timeout;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.robolectric.shadows.ShadowLog;
+import org.robolectric.RobolectricTestRunner;
+import org.webrtc.IceCandidate;
+import org.webrtc.SessionDescription;
+
+/**
+ * Test for DirectRTCClient. Test is very simple and only tests the overall sanity of the class
+ * behaviour.
+ */
+@RunWith(RobolectricTestRunner.class)
+@Config(manifest = Config.NONE)
+public class DirectRTCClientTest {
+ private static final String ROOM_URL = "";
+ private static final boolean LOOPBACK = false;
+
+ private static final String DUMMY_SDP_MID = "sdpMid";
+ private static final String DUMMY_SDP = "sdp";
+
+ public static final int SERVER_WAIT = 100;
+ public static final int NETWORK_TIMEOUT = 1000;
+
+ private DirectRTCClient client;
+ private DirectRTCClient server;
+
+ AppRTCClient.SignalingEvents clientEvents;
+ AppRTCClient.SignalingEvents serverEvents;
+
+ @Before
+ public void setUp() {
+ ShadowLog.stream = System.out;
+
+ clientEvents = mock(AppRTCClient.SignalingEvents.class);
+ serverEvents = mock(AppRTCClient.SignalingEvents.class);
+
+ client = new DirectRTCClient(clientEvents);
+ server = new DirectRTCClient(serverEvents);
+ }
+
+ @Test
+ public void testValidIpPattern() {
+ // Strings that should match the pattern.
+ // clang-format off
+ final String[] ipAddresses = new String[] {
+ "0.0.0.0",
+ "127.0.0.1",
+ "192.168.0.1",
+ "0.0.0.0:8888",
+ "127.0.0.1:8888",
+ "192.168.0.1:8888",
+ "::",
+ "::1",
+ "2001:0db8:85a3:0000:0000:8a2e:0370:7946",
+ "[::]",
+ "[::1]",
+ "[2001:0db8:85a3:0000:0000:8a2e:0370:7946]",
+ "[::]:8888",
+ "[::1]:8888",
+ "[2001:0db8:85a3:0000:0000:8a2e:0370:7946]:8888"
+ };
+ // clang-format on
+
+ for (String ip : ipAddresses) {
+ assertTrue(ip + " didn't match IP_PATTERN even though it should.",
+ DirectRTCClient.IP_PATTERN.matcher(ip).matches());
+ }
+ }
+
+ @Test
+ public void testInvalidIpPattern() {
+ // Strings that shouldn't match the pattern.
+ // clang-format off
+ final String[] invalidIpAddresses = new String[] {
+ "Hello, World!",
+ "aaaa",
+ "1111",
+ "[hello world]",
+ "hello:world"
+ };
+ // clang-format on
+
+ for (String invalidIp : invalidIpAddresses) {
+ assertFalse(invalidIp + " matched IP_PATTERN even though it shouldn't.",
+ DirectRTCClient.IP_PATTERN.matcher(invalidIp).matches());
+ }
+ }
+
+ // TODO(sakal): Replace isNotNull(class) with isNotNull() once Java 8 is used.
+ @SuppressWarnings("deprecation")
+ @Test
+ public void testDirectRTCClient() {
+ server.connectToRoom(new AppRTCClient.RoomConnectionParameters(ROOM_URL, "0.0.0.0", LOOPBACK));
+ try {
+ Thread.sleep(SERVER_WAIT);
+ } catch (InterruptedException e) {
+ fail(e.getMessage());
+ }
+ client.connectToRoom(
+ new AppRTCClient.RoomConnectionParameters(ROOM_URL, "127.0.0.1", LOOPBACK));
+ verify(serverEvents, timeout(NETWORK_TIMEOUT))
+ .onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));
+
+ SessionDescription offerSdp = new SessionDescription(SessionDescription.Type.OFFER, DUMMY_SDP);
+ server.sendOfferSdp(offerSdp);
+ verify(clientEvents, timeout(NETWORK_TIMEOUT))
+ .onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));
+
+ SessionDescription answerSdp =
+ new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
+ client.sendAnswerSdp(answerSdp);
+ verify(serverEvents, timeout(NETWORK_TIMEOUT))
+ .onRemoteDescription(isNotNull(SessionDescription.class));
+
+ IceCandidate candidate = new IceCandidate(DUMMY_SDP_MID, 0, DUMMY_SDP);
+ server.sendLocalIceCandidate(candidate);
+ verify(clientEvents, timeout(NETWORK_TIMEOUT))
+ .onRemoteIceCandidate(isNotNull(IceCandidate.class));
+
+ client.sendLocalIceCandidate(candidate);
+ verify(serverEvents, timeout(NETWORK_TIMEOUT))
+ .onRemoteIceCandidate(isNotNull(IceCandidate.class));
+
+ client.disconnectFromRoom();
+ verify(clientEvents, timeout(NETWORK_TIMEOUT)).onChannelClose();
+ verify(serverEvents, timeout(NETWORK_TIMEOUT)).onChannelClose();
+
+ verifyNoMoreInteractions(clientEvents);
+ verifyNoMoreInteractions(serverEvents);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java b/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java
new file mode 100644
index 0000000000..ce550b35e4
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc;
+
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.timeout;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.robolectric.annotation.Config;
+import org.robolectric.shadows.ShadowLog;
+import org.robolectric.RobolectricTestRunner;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+@RunWith(RobolectricTestRunner.class)
+@Config(manifest = Config.NONE)
+public class TCPChannelClientTest {
+ private static final int PORT = 8888;
+ /**
+ * How long we wait before trying to connect to the server. Note: was
+ * previously only 10, which was too short (tests were flaky).
+ */
+ private static final int SERVER_WAIT = 300;
+ private static final int CONNECT_TIMEOUT = 1000;
+ private static final int SEND_TIMEOUT = 1000;
+ private static final int DISCONNECT_TIMEOUT = 1000;
+ private static final int TERMINATION_TIMEOUT = 1000;
+ private static final String TEST_MESSAGE_SERVER = "Hello, Server!";
+ private static final String TEST_MESSAGE_CLIENT = "Hello, Client!";
+
+ @Mock TCPChannelClient.TCPChannelEvents serverEvents;
+ @Mock TCPChannelClient.TCPChannelEvents clientEvents;
+
+ private ExecutorService executor;
+ private TCPChannelClient server;
+ private TCPChannelClient client;
+
+ @Before
+ public void setUp() {
+ ShadowLog.stream = System.out;
+
+ MockitoAnnotations.initMocks(this);
+
+ executor = Executors.newSingleThreadExecutor();
+ }
+
+ @After
+ public void tearDown() {
+ verifyNoMoreEvents();
+
+ executeAndWait(new Runnable() {
+ @Override
+ public void run() {
+ client.disconnect();
+ server.disconnect();
+ }
+ });
+
+ // Stop the executor thread
+ executor.shutdown();
+ try {
+ executor.awaitTermination(TERMINATION_TIMEOUT, TimeUnit.MILLISECONDS);
+ } catch (InterruptedException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testConnectIPv4() {
+ setUpIPv4Server();
+ try {
+ Thread.sleep(SERVER_WAIT);
+ } catch (InterruptedException e) {
+ fail(e.getMessage());
+ }
+ setUpIPv4Client();
+
+ verify(serverEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(true);
+ verify(clientEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(false);
+ }
+
+ @Test
+ public void testConnectIPv6() {
+ setUpIPv6Server();
+ try {
+ Thread.sleep(SERVER_WAIT);
+ } catch (InterruptedException e) {
+ fail(e.getMessage());
+ }
+ setUpIPv6Client();
+
+ verify(serverEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(true);
+ verify(clientEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(false);
+ }
+
+ @Test
+ public void testSendData() {
+ testConnectIPv4();
+
+ executeAndWait(new Runnable() {
+ @Override
+ public void run() {
+ client.send(TEST_MESSAGE_SERVER);
+ server.send(TEST_MESSAGE_CLIENT);
+ }
+ });
+
+ verify(serverEvents, timeout(SEND_TIMEOUT)).onTCPMessage(TEST_MESSAGE_SERVER);
+ verify(clientEvents, timeout(SEND_TIMEOUT)).onTCPMessage(TEST_MESSAGE_CLIENT);
+ }
+
+ @Test
+ public void testDisconnectServer() {
+ testConnectIPv4();
+ executeAndWait(new Runnable() {
+ @Override
+ public void run() {
+ server.disconnect();
+ }
+ });
+
+ verify(serverEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
+ verify(clientEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
+ }
+
+ @Test
+ public void testDisconnectClient() {
+ testConnectIPv4();
+ executeAndWait(new Runnable() {
+ @Override
+ public void run() {
+ client.disconnect();
+ }
+ });
+
+ verify(serverEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
+ verify(clientEvents, timeout(DISCONNECT_TIMEOUT)).onTCPClose();
+ }
+
+ private void setUpIPv4Server() {
+ setUpServer("0.0.0.0", PORT);
+ }
+
+ private void setUpIPv4Client() {
+ setUpClient("127.0.0.1", PORT);
+ }
+
+ private void setUpIPv6Server() {
+ setUpServer("::", PORT);
+ }
+
+ private void setUpIPv6Client() {
+ setUpClient("::1", PORT);
+ }
+
+ private void setUpServer(String ip, int port) {
+ server = new TCPChannelClient(executor, serverEvents, ip, port);
+ }
+
+ private void setUpClient(String ip, int port) {
+ client = new TCPChannelClient(executor, clientEvents, ip, port);
+ }
+
+ /**
+ * Verifies no more server or client events have been issued
+ */
+ private void verifyNoMoreEvents() {
+ verifyNoMoreInteractions(serverEvents);
+ verifyNoMoreInteractions(clientEvents);
+ }
+
+ /**
+ * Queues runnable to be run and waits for it to be executed by the executor thread
+ */
+ public void executeAndWait(Runnable runnable) {
+ try {
+ executor.submit(runnable).get();
+ } catch (Exception e) {
+ fail(e.getMessage());
+ }
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidnativeapi/AndroidManifest.xml b/third_party/libwebrtc/examples/androidnativeapi/AndroidManifest.xml
new file mode 100644
index 0000000000..27c7d9dd35
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/AndroidManifest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.webrtc.examples.androidnativeapi">
+
+
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.CAMERA" />
+
+ <application
+ android:allowBackup="true"
+ android:label="@string/app_name"
+ android:supportsRtl="true">
+ <activity android:name=".MainActivity"
+ android:exported="true">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ </application>
+
+</manifest>
diff --git a/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn b/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn
new file mode 100644
index 0000000000..e0eb6d8b24
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn
@@ -0,0 +1,80 @@
+import("//webrtc.gni")
+
+if (is_android) {
+ import("//third_party/jni_zero/jni_zero.gni")
+ rtc_android_apk("androidnativeapi") {
+ testonly = true
+ apk_name = "androidnativeapi"
+ android_manifest = "AndroidManifest.xml"
+ min_sdk_version = 21
+ target_sdk_version = 31
+
+ sources = [
+ "java/org/webrtc/examples/androidnativeapi/CallClient.java",
+ "java/org/webrtc/examples/androidnativeapi/MainActivity.java",
+ ]
+
+ deps = [
+ ":resources",
+ "//rtc_base:base_java",
+ "//sdk/android:camera_java",
+ "//sdk/android:surfaceviewrenderer_java",
+ "//sdk/android:video_api_java",
+ "//sdk/android:video_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+
+ shared_libraries = [ ":examples_androidnativeapi_jni" ]
+ }
+
+ generate_jni("generated_jni") {
+ testonly = true
+ sources = [ "java/org/webrtc/examples/androidnativeapi/CallClient.java" ]
+ namespace = "webrtc_examples"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ rtc_shared_library("examples_androidnativeapi_jni") {
+ testonly = true
+ sources = [
+ "jni/android_call_client.cc",
+ "jni/android_call_client.h",
+ "jni/onload.cc",
+ ]
+
+ suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
+ configs += [ "//build/config/android:hide_all_but_jni" ]
+
+ deps = [
+ ":generated_jni",
+ "../../api:scoped_refptr",
+ "../../api:sequence_checker",
+ "../../rtc_base:ssl",
+ "../../rtc_base/synchronization:mutex",
+ "//api:libjingle_peerconnection_api",
+ "//api/rtc_event_log:rtc_event_log_factory",
+ "//api/task_queue:default_task_queue_factory",
+ "//media:rtc_audio_video",
+ "//media:rtc_internal_video_codecs",
+ "//media:rtc_media_engine_defaults",
+ "//modules/utility",
+ "//pc:libjingle_peerconnection",
+ "//sdk/android:native_api_base",
+ "//sdk/android:native_api_jni",
+ "//sdk/android:native_api_video",
+ ]
+ }
+
+ android_resources("resources") {
+ testonly = true
+ custom_package = "org.webrtc.examples.androidnativeapi"
+ sources = [
+ "res/layout/activity_main.xml",
+ "res/values/strings.xml",
+ ]
+
+ # Needed for Bazel converter.
+ resource_dirs = [ "res" ]
+ assert(resource_dirs != []) # Mark as used.
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidnativeapi/DEPS b/third_party/libwebrtc/examples/androidnativeapi/DEPS
new file mode 100644
index 0000000000..5dbfcf1bb8
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+modules/utility/include",
+ "+sdk/android/native_api",
+]
diff --git a/third_party/libwebrtc/examples/androidnativeapi/OWNERS b/third_party/libwebrtc/examples/androidnativeapi/OWNERS
new file mode 100644
index 0000000000..cf092a316a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/OWNERS
@@ -0,0 +1 @@
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/CallClient.java b/third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/CallClient.java
new file mode 100644
index 0000000000..7369a1286d
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/CallClient.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidnativeapi;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.HandlerThread;
+import org.webrtc.CapturerObserver;
+import org.webrtc.SurfaceTextureHelper;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoSink;
+
+public class CallClient {
+ private static final String TAG = "CallClient";
+ private static final int CAPTURE_WIDTH = 640;
+ private static final int CAPTURE_HEIGHT = 480;
+ private static final int CAPTURE_FPS = 30;
+
+ private final Context applicationContext;
+ private final HandlerThread thread;
+ private final Handler handler;
+
+ private long nativeClient;
+ private SurfaceTextureHelper surfaceTextureHelper;
+ private VideoCapturer videoCapturer;
+
+ public CallClient(Context applicationContext) {
+ this.applicationContext = applicationContext;
+ thread = new HandlerThread(TAG + "Thread");
+ thread.start();
+ handler = new Handler(thread.getLooper());
+ handler.post(() -> { nativeClient = nativeCreateClient(); });
+ }
+
+ public void call(VideoSink localSink, VideoSink remoteSink, VideoCapturer videoCapturer,
+ SurfaceTextureHelper videoCapturerSurfaceTextureHelper) {
+ handler.post(() -> {
+ nativeCall(nativeClient, localSink, remoteSink);
+ videoCapturer.initialize(videoCapturerSurfaceTextureHelper, applicationContext,
+ nativeGetJavaVideoCapturerObserver(nativeClient));
+ videoCapturer.startCapture(CAPTURE_WIDTH, CAPTURE_HEIGHT, CAPTURE_FPS);
+ });
+ }
+
+ public void hangup() {
+ handler.post(() -> { nativeHangup(nativeClient); });
+ }
+
+ public void close() {
+ handler.post(() -> {
+ nativeDelete(nativeClient);
+ nativeClient = 0;
+ });
+ thread.quitSafely();
+ }
+
+ private static native long nativeCreateClient();
+ private static native void nativeCall(
+ long nativeAndroidCallClient, VideoSink localSink, VideoSink remoteSink);
+ private static native void nativeHangup(long nativeAndroidCallClient);
+ private static native void nativeDelete(long nativeAndroidCallClient);
+ private static native CapturerObserver nativeGetJavaVideoCapturerObserver(
+ long nativeAndroidCallClient);
+}
diff --git a/third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/MainActivity.java b/third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/MainActivity.java
new file mode 100644
index 0000000000..72fc0a686d
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/java/org/webrtc/examples/androidnativeapi/MainActivity.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidnativeapi;
+
+import android.app.Activity;
+import android.content.Context;
+import android.os.Bundle;
+import android.widget.Button;
+import androidx.annotation.Nullable;
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
+import org.webrtc.ContextUtils;
+import org.webrtc.EglBase;
+import org.webrtc.GlRectDrawer;
+import org.webrtc.SurfaceTextureHelper;
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoCapturer;
+
+public class MainActivity extends Activity {
+ private @Nullable CallClient callClient;
+ private @Nullable EglBase eglBase;
+ private @Nullable SurfaceViewRenderer localRenderer;
+ private @Nullable SurfaceViewRenderer remoteRenderer;
+ private @Nullable SurfaceTextureHelper videoCapturerSurfaceTextureHelper;
+ private @Nullable VideoCapturer videoCapturer;
+
+ @Override
+ protected void onCreate(Bundle savedInstance) {
+ ContextUtils.initialize(getApplicationContext());
+
+ super.onCreate(savedInstance);
+ setContentView(R.layout.activity_main);
+
+ System.loadLibrary("examples_androidnativeapi_jni");
+ callClient = new CallClient(getApplicationContext());
+
+ Button callButton = (Button) findViewById(R.id.call_button);
+ callButton.setOnClickListener((view) -> {
+ if (videoCapturer == null) {
+ videoCapturer = createVideoCapturer(getApplicationContext());
+ }
+ callClient.call(
+ localRenderer, remoteRenderer, videoCapturer, videoCapturerSurfaceTextureHelper);
+ });
+
+ Button hangupButton = (Button) findViewById(R.id.hangup_button);
+ hangupButton.setOnClickListener((view) -> { hangup(); });
+ }
+
+ @Override
+ protected void onStart() {
+ super.onStart();
+
+ eglBase = EglBase.create(null /* sharedContext */, EglBase.CONFIG_PLAIN);
+ localRenderer = (SurfaceViewRenderer) findViewById(R.id.local_renderer);
+ remoteRenderer = (SurfaceViewRenderer) findViewById(R.id.remote_renderer);
+
+ localRenderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */, EglBase.CONFIG_PLAIN,
+ new GlRectDrawer());
+ remoteRenderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */,
+ EglBase.CONFIG_PLAIN, new GlRectDrawer());
+
+ videoCapturerSurfaceTextureHelper =
+ SurfaceTextureHelper.create("VideoCapturerThread", eglBase.getEglBaseContext());
+ }
+
+ @Override
+ protected void onStop() {
+ hangup();
+
+ localRenderer.release();
+ remoteRenderer.release();
+ videoCapturerSurfaceTextureHelper.dispose();
+ eglBase.release();
+
+ localRenderer = null;
+ remoteRenderer = null;
+ videoCapturerSurfaceTextureHelper = null;
+ eglBase = null;
+
+ super.onStop();
+ }
+
+ @Override
+ protected void onDestroy() {
+ callClient.close();
+ callClient = null;
+
+ super.onDestroy();
+ }
+
+ private void hangup() {
+ if (videoCapturer != null) {
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ videoCapturer.dispose();
+ videoCapturer = null;
+ }
+ callClient.hangup();
+ }
+
+ private static VideoCapturer createVideoCapturer(Context context) {
+ CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
+ ? new Camera2Enumerator(context)
+ : new Camera1Enumerator();
+ return enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.cc b/third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.cc
new file mode 100644
index 0000000000..2713a563cd
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.cc
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/androidnativeapi/jni/android_call_client.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "examples/androidnativeapi/generated_jni/CallClient_jni.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/video/wrapper.h"
+
+namespace webrtc_examples {
+
+class AndroidCallClient::PCObserver : public webrtc::PeerConnectionObserver {
+ public:
+ explicit PCObserver(AndroidCallClient* client);
+
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override;
+ void OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+ void OnRenegotiationNeeded() override;
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+
+ private:
+ AndroidCallClient* const client_;
+};
+
+namespace {
+
+class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
+ public:
+ explicit CreateOfferObserver(
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);
+
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError error) override;
+
+ private:
+ const rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_;
+};
+
+class SetRemoteSessionDescriptionObserver
+ : public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+ void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override;
+};
+
+class SetLocalSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ void OnSuccess() override;
+ void OnFailure(webrtc::RTCError error) override;
+};
+
+} // namespace
+
+AndroidCallClient::AndroidCallClient()
+ : call_started_(false), pc_observer_(std::make_unique<PCObserver>(this)) {
+ thread_checker_.Detach();
+ CreatePeerConnectionFactory();
+}
+
+AndroidCallClient::~AndroidCallClient() = default;
+
+void AndroidCallClient::Call(JNIEnv* env,
+ const webrtc::JavaRef<jobject>& local_sink,
+ const webrtc::JavaRef<jobject>& remote_sink) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ webrtc::MutexLock lock(&pc_mutex_);
+ if (call_started_) {
+ RTC_LOG(LS_WARNING) << "Call already started.";
+ return;
+ }
+ call_started_ = true;
+
+ local_sink_ = webrtc::JavaToNativeVideoSink(env, local_sink.obj());
+ remote_sink_ = webrtc::JavaToNativeVideoSink(env, remote_sink.obj());
+
+ video_source_ = webrtc::CreateJavaVideoSource(env, signaling_thread_.get(),
+ /* is_screencast= */ false,
+ /* align_timestamps= */ true);
+
+ CreatePeerConnection();
+ Connect();
+}
+
+void AndroidCallClient::Hangup(JNIEnv* env) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ call_started_ = false;
+
+ {
+ webrtc::MutexLock lock(&pc_mutex_);
+ if (pc_ != nullptr) {
+ pc_->Close();
+ pc_ = nullptr;
+ }
+ }
+
+ local_sink_ = nullptr;
+ remote_sink_ = nullptr;
+ video_source_ = nullptr;
+}
+
+void AndroidCallClient::Delete(JNIEnv* env) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ delete this;
+}
+
+webrtc::ScopedJavaLocalRef<jobject>
+AndroidCallClient::GetJavaVideoCapturerObserver(JNIEnv* env) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ return video_source_->GetJavaVideoCapturerObserver(env);
+}
+
+void AndroidCallClient::CreatePeerConnectionFactory() {
+ network_thread_ = rtc::Thread::CreateWithSocketServer();
+ network_thread_->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread_->Start()) << "Failed to start thread";
+
+ worker_thread_ = rtc::Thread::Create();
+ worker_thread_->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread_->Start()) << "Failed to start thread";
+
+ signaling_thread_ = rtc::Thread::Create();
+ signaling_thread_->SetName("signaling_thread", nullptr);
+ RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread";
+
+ webrtc::PeerConnectionFactoryDependencies pcf_deps;
+ pcf_deps.network_thread = network_thread_.get();
+ pcf_deps.worker_thread = worker_thread_.get();
+ pcf_deps.signaling_thread = signaling_thread_.get();
+ pcf_deps.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+ pcf_deps.call_factory = webrtc::CreateCallFactory();
+ pcf_deps.event_log_factory = std::make_unique<webrtc::RtcEventLogFactory>(
+ pcf_deps.task_queue_factory.get());
+
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
+ media_deps.video_encoder_factory =
+ std::make_unique<webrtc::InternalEncoderFactory>();
+ media_deps.video_decoder_factory =
+ std::make_unique<webrtc::InternalDecoderFactory>();
+ webrtc::SetMediaEngineDefaults(&media_deps);
+ pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ RTC_LOG(LS_INFO) << "Media engine created: " << pcf_deps.media_engine.get();
+
+ pcf_ = CreateModularPeerConnectionFactory(std::move(pcf_deps));
+ RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_.get();
+}
+
+void AndroidCallClient::CreatePeerConnection() {
+ webrtc::MutexLock lock(&pc_mutex_);
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ // Encryption has to be disabled for loopback to work.
+ webrtc::PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ pcf_->SetOptions(options);
+ webrtc::PeerConnectionDependencies deps(pc_observer_.get());
+ pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(deps)).MoveValue();
+
+ RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_.get();
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
+ pcf_->CreateVideoTrack(video_source_, "video");
+ local_video_track->AddOrUpdateSink(local_sink_.get(), rtc::VideoSinkWants());
+ pc_->AddTransceiver(local_video_track);
+ RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track.get();
+
+ for (const rtc::scoped_refptr<webrtc::RtpTransceiverInterface>& tranceiver :
+ pc_->GetTransceivers()) {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track =
+ tranceiver->receiver()->track();
+ if (track &&
+ track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+ static_cast<webrtc::VideoTrackInterface*>(track.get())
+ ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants());
+ RTC_LOG(LS_INFO) << "Remote video sink set up: " << track.get();
+ break;
+ }
+ }
+}
+
+void AndroidCallClient::Connect() {
+ webrtc::MutexLock lock(&pc_mutex_);
+ pc_->CreateOffer(rtc::make_ref_counted<CreateOfferObserver>(pc_).get(),
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+}
+
+AndroidCallClient::PCObserver::PCObserver(AndroidCallClient* client)
+ : client_(client) {}
+
+void AndroidCallClient::PCObserver::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ RTC_LOG(LS_INFO) << "OnSignalingChange: " << new_state;
+}
+
+void AndroidCallClient::PCObserver::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+ RTC_LOG(LS_INFO) << "OnDataChannel";
+}
+
+void AndroidCallClient::PCObserver::OnRenegotiationNeeded() {
+ RTC_LOG(LS_INFO) << "OnRenegotiationNeeded";
+}
+
+void AndroidCallClient::PCObserver::OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) {
+ RTC_LOG(LS_INFO) << "OnIceConnectionChange: " << new_state;
+}
+
+void AndroidCallClient::PCObserver::OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) {
+ RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state;
+}
+
+void AndroidCallClient::PCObserver::OnIceCandidate(
+ const webrtc::IceCandidateInterface* candidate) {
+ RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
+ webrtc::MutexLock lock(&client_->pc_mutex_);
+ RTC_DCHECK(client_->pc_ != nullptr);
+ client_->pc_->AddIceCandidate(candidate);
+}
+
+CreateOfferObserver::CreateOfferObserver(
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
+ : pc_(pc) {}
+
+void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
+ std::string sdp;
+ desc->ToString(&sdp);
+ RTC_LOG(LS_INFO) << "Created offer: " << sdp;
+
+ // Ownership of desc was transferred to us, now we transfer it forward.
+ pc_->SetLocalDescription(
+ rtc::make_ref_counted<SetLocalSessionDescriptionObserver>().get(), desc);
+
+ // Generate a fake answer.
+ std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
+ webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
+ pc_->SetRemoteDescription(
+ std::move(answer),
+ rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
+}
+
+void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Failed to create offer: " << ToString(error.type())
+ << ": " << error.message();
+}
+
+void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(
+ webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Set remote description: " << error.message();
+}
+
+void SetLocalSessionDescriptionObserver::OnSuccess() {
+ RTC_LOG(LS_INFO) << "Set local description success!";
+}
+
+void SetLocalSessionDescriptionObserver::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Set local description failure: "
+ << ToString(error.type()) << ": " << error.message();
+}
+
+static jlong JNI_CallClient_CreateClient(JNIEnv* env) {
+ return webrtc::NativeToJavaPointer(new webrtc_examples::AndroidCallClient());
+}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.h b/third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.h
new file mode 100644
index 0000000000..c9153d09bd
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/jni/android_call_client.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_ANDROIDNATIVEAPI_JNI_ANDROID_CALL_CLIENT_H_
+#define EXAMPLES_ANDROIDNATIVEAPI_JNI_ANDROID_CALL_CLIENT_H_
+
+#include <jni.h>
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/native_api/video/video_source.h"
+
+namespace webrtc_examples {
+
+class AndroidCallClient {
+ public:
+ AndroidCallClient();
+ ~AndroidCallClient();
+
+ void Call(JNIEnv* env,
+ const webrtc::JavaRef<jobject>& local_sink,
+ const webrtc::JavaRef<jobject>& remote_sink);
+ void Hangup(JNIEnv* env);
+ // A helper method for Java code to delete this object. Calls delete this.
+ void Delete(JNIEnv* env);
+
+ webrtc::ScopedJavaLocalRef<jobject> GetJavaVideoCapturerObserver(JNIEnv* env);
+
+ private:
+ class PCObserver;
+
+ void CreatePeerConnectionFactory() RTC_RUN_ON(thread_checker_);
+ void CreatePeerConnection() RTC_RUN_ON(thread_checker_);
+ void Connect() RTC_RUN_ON(thread_checker_);
+
+ webrtc::SequenceChecker thread_checker_;
+
+ bool call_started_ RTC_GUARDED_BY(thread_checker_);
+
+ const std::unique_ptr<PCObserver> pc_observer_;
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_
+ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> network_thread_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> worker_thread_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> signaling_thread_
+ RTC_GUARDED_BY(thread_checker_);
+
+ std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> local_sink_
+ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> remote_sink_
+ RTC_GUARDED_BY(thread_checker_);
+ rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> video_source_
+ RTC_GUARDED_BY(thread_checker_);
+
+ webrtc::Mutex pc_mutex_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_
+ RTC_GUARDED_BY(pc_mutex_);
+};
+
+} // namespace webrtc_examples
+
+#endif // EXAMPLES_ANDROIDNATIVEAPI_JNI_ANDROID_CALL_CLIENT_H_
diff --git a/third_party/libwebrtc/examples/androidnativeapi/jni/onload.cc b/third_party/libwebrtc/examples/androidnativeapi/jni/onload.cc
new file mode 100644
index 0000000000..6ea5275d2a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/jni/onload.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/utility/include/jvm_android.h"
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/base/init.h"
+
+namespace webrtc_examples {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
+ webrtc::InitAndroid(jvm);
+ webrtc::JVM::Initialize(jvm);
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ return JNI_VERSION_1_6;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/androidnativeapi/res/layout/activity_main.xml b/third_party/libwebrtc/examples/androidnativeapi/res/layout/activity_main.xml
new file mode 100644
index 0000000000..ac8037320f
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/res/layout/activity_main.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:app="http://schemas.android.com/apk/res-auto"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:padding="8dp"
+ tools:context="org.webrtc.examples.androidnativeapi.MainActivity">
+
+ <org.webrtc.SurfaceViewRenderer
+ android:id="@+id/local_renderer"
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1"
+ android:layout_margin="8dp"/>
+
+ <org.webrtc.SurfaceViewRenderer
+ android:id="@+id/remote_renderer"
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1"
+ android:layout_margin="8dp"/>
+
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ style="?android:attr/buttonBarStyle">
+
+ <Button
+ android:id="@+id/call_button"
+ android:text="@string/call_button"
+ style="?android:attr/buttonBarButtonStyle"
+ android:layout_width="0dp"
+ android:layout_height="48dp"
+ android:layout_weight="1"
+ android:layout_margin="8dp"/>
+
+ <Button
+ android:id="@+id/hangup_button"
+ android:text="@string/hangup_button"
+ style="?android:attr/buttonBarButtonStyle"
+ android:layout_width="0dp"
+ android:layout_height="48dp"
+ android:layout_weight="1"
+ android:layout_margin="8dp"/>
+
+ </LinearLayout>
+
+</LinearLayout>
diff --git a/third_party/libwebrtc/examples/androidnativeapi/res/values/strings.xml b/third_party/libwebrtc/examples/androidnativeapi/res/values/strings.xml
new file mode 100644
index 0000000000..a00920c92b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidnativeapi/res/values/strings.xml
@@ -0,0 +1,5 @@
+<resources>
+ <string name="app_name">androidnativeapi</string>
+ <string name="call_button">Call</string>
+ <string name="hangup_button">Hangup</string>
+</resources>
diff --git a/third_party/libwebrtc/examples/androidtests/AndroidManifest.xml b/third_party/libwebrtc/examples/androidtests/AndroidManifest.xml
new file mode 100644
index 0000000000..90e130e735
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/AndroidManifest.xml
@@ -0,0 +1,26 @@
+<!--
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+-->
+
+<manifest
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="org.appspot.apprtc.test">
+
+ <uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
+
+ <application>
+ <uses-library android:name="android.test.runner" />
+ </application>
+
+ <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+ tools:ignore="MissingPrefix"
+ android:targetPackage="org.appspot.apprtc"
+ android:label="Tests for AppRTCMobile"/>
+</manifest>
diff --git a/third_party/libwebrtc/examples/androidtests/OWNERS b/third_party/libwebrtc/examples/androidtests/OWNERS
new file mode 100644
index 0000000000..cf092a316a
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/OWNERS
@@ -0,0 +1 @@
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/androidtests/README b/third_party/libwebrtc/examples/androidtests/README
new file mode 100644
index 0000000000..0701b0e896
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/README
@@ -0,0 +1,14 @@
+This directory contains an example unit test for Android AppRTCMobile.
+
+Example of building & using the app:
+
+- Build Android AppRTCMobile and AppRTCMobile unit test:
+cd <path/to/webrtc>/src
+ninja -C out/Debug AppRTCMobile_test_apk
+
+- Install AppRTCMobile and AppRTCMobileTest:
+adb install -r out/Debug/apks/AppRTCMobile.apk
+adb install -r out/Debug/apks/AppRTCMobileTest.apk
+
+- Run unit tests:
+adb shell am instrument -w org.appspot.apprtc.test/androidx.test.runner.AndroidJUnitRunner
diff --git a/third_party/libwebrtc/examples/androidtests/ant.properties b/third_party/libwebrtc/examples/androidtests/ant.properties
new file mode 100644
index 0000000000..ec7d042885
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/ant.properties
@@ -0,0 +1,18 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+# 'source.dir' for the location of your java source folder and
+# 'out.dir' for the location of your output folder.
+
+# You can also use it to define how the release builds are signed by declaring
+# the following properties:
+# 'key.store' for the location of your keystore and
+# 'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
+tested.project.dir=../android
diff --git a/third_party/libwebrtc/examples/androidtests/build.xml b/third_party/libwebrtc/examples/androidtests/build.xml
new file mode 100644
index 0000000000..95847b74c2
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="AppRTCMobileTest" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+ <!-- if sdk.dir was not set from one of the property file, then
+ get it from the ANDROID_HOME env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/third_party/libwebrtc/examples/androidtests/project.properties b/third_party/libwebrtc/examples/androidtests/project.properties
new file mode 100644
index 0000000000..a6ca533fe3
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
new file mode 100644
index 0000000000..9c3c779600
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
@@ -0,0 +1,637 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.appspot.apprtc.test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.os.Build;
+import android.util.Log;
+import androidx.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import androidx.test.runner.AndroidJUnit4;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import org.appspot.apprtc.AppRTCClient.SignalingParameters;
+import org.appspot.apprtc.PeerConnectionClient;
+import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents;
+import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
+import org.webrtc.EglBase;
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RTCStatsReport;
+import org.webrtc.SessionDescription;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoSink;
+
+@RunWith(AndroidJUnit4.class)
+public class PeerConnectionClientTest implements PeerConnectionEvents {
+ private static final String TAG = "RTCClientTest";
+ private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000;
+ private static final int WAIT_TIMEOUT = 7000;
+ private static final int CAMERA_SWITCH_ATTEMPTS = 3;
+ private static final int VIDEO_RESTART_ATTEMPTS = 3;
+ private static final int CAPTURE_FORMAT_CHANGE_ATTEMPTS = 3;
+ private static final int VIDEO_RESTART_TIMEOUT = 500;
+ private static final int EXPECTED_VIDEO_FRAMES = 10;
+ private static final String VIDEO_CODEC_VP8 = "VP8";
+ private static final String VIDEO_CODEC_VP9 = "VP9";
+ private static final String VIDEO_CODEC_H264 = "H264";
+ private static final int AUDIO_RUN_TIMEOUT = 1000;
+ private static final String LOCAL_RENDERER_NAME = "Local renderer";
+ private static final String REMOTE_RENDERER_NAME = "Remote renderer";
+
+ private static final int MAX_VIDEO_FPS = 30;
+ private static final int WIDTH_VGA = 640;
+ private static final int HEIGHT_VGA = 480;
+ private static final int WIDTH_QVGA = 320;
+ private static final int HEIGHT_QVGA = 240;
+
+ // The peer connection client is assumed to be thread safe in itself; the
+ // reference is written by the test thread and read by worker threads.
+ private volatile PeerConnectionClient pcClient;
+ private volatile boolean loopback;
+
+ // These are protected by their respective event objects.
+ private ExecutorService signalingExecutor;
+ private boolean isClosed;
+ private boolean isIceConnected;
+ private SessionDescription localDesc;
+ private List<IceCandidate> iceCandidates = new ArrayList<>();
+ private final Object localDescEvent = new Object();
+ private final Object iceCandidateEvent = new Object();
+ private final Object iceConnectedEvent = new Object();
+ private final Object closeEvent = new Object();
+
+ // Mock VideoSink implementation.
+ private static class MockSink implements VideoSink {
+    // These are protected by 'this' since we get called from worker threads.
+ private String rendererName;
+ private boolean renderFrameCalled;
+
+ // Thread-safe in itself.
+ private CountDownLatch doneRendering;
+
+ public MockSink(int expectedFrames, String rendererName) {
+ this.rendererName = rendererName;
+ reset(expectedFrames);
+ }
+
+ // Resets render to wait for new amount of video frames.
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void reset(int expectedFrames) {
+ renderFrameCalled = false;
+ doneRendering = new CountDownLatch(expectedFrames);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(VideoFrame frame) {
+ if (!renderFrameCalled) {
+ if (rendererName != null) {
+ Log.d(TAG,
+ rendererName + " render frame: " + frame.getRotatedWidth() + " x "
+ + frame.getRotatedHeight());
+ } else {
+ Log.d(TAG, "Render frame: " + frame.getRotatedWidth() + " x " + frame.getRotatedHeight());
+ }
+ }
+ renderFrameCalled = true;
+ doneRendering.countDown();
+ }
+
+ // This method shouldn't hold any locks or touch member variables since it
+ // blocks.
+ public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
+ doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
+ return (doneRendering.getCount() <= 0);
+ }
+ }
+
+ // Peer connection events implementation.
+ @Override
+ public void onLocalDescription(SessionDescription desc) {
+ Log.d(TAG, "Local description type: " + desc.type);
+ synchronized (localDescEvent) {
+ localDesc = desc;
+ localDescEvent.notifyAll();
+ }
+ }
+
+ @Override
+ public void onIceCandidate(final IceCandidate candidate) {
+ synchronized (iceCandidateEvent) {
+ Log.d(TAG, "IceCandidate #" + iceCandidates.size() + " : " + candidate.toString());
+ if (loopback) {
+ // Loopback local ICE candidate in a separate thread to avoid adding
+ // remote ICE candidate in a local ICE candidate callback.
+ signalingExecutor.execute(new Runnable() {
+ @Override
+ public void run() {
+ pcClient.addRemoteIceCandidate(candidate);
+ }
+ });
+ }
+ iceCandidates.add(candidate);
+ iceCandidateEvent.notifyAll();
+ }
+ }
+
+ @Override
+ public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+ // TODO(honghaiz): Add this for tests.
+ }
+
+ @Override
+ public void onIceConnected() {
+ Log.d(TAG, "ICE Connected");
+ synchronized (iceConnectedEvent) {
+ isIceConnected = true;
+ iceConnectedEvent.notifyAll();
+ }
+ }
+
+ @Override
+ public void onIceDisconnected() {
+ Log.d(TAG, "ICE Disconnected");
+ synchronized (iceConnectedEvent) {
+ isIceConnected = false;
+ iceConnectedEvent.notifyAll();
+ }
+ }
+
+ @Override
+ public void onConnected() {
+ Log.d(TAG, "DTLS Connected");
+ }
+
+ @Override
+ public void onDisconnected() {
+ Log.d(TAG, "DTLS Disconnected");
+ }
+
+ @Override
+ public void onPeerConnectionClosed() {
+ Log.d(TAG, "PeerConnection closed");
+ synchronized (closeEvent) {
+ isClosed = true;
+ closeEvent.notifyAll();
+ }
+ }
+
+ @Override
+ public void onPeerConnectionError(String description) {
+ fail("PC Error: " + description);
+ }
+
+ @Override
+ public void onPeerConnectionStatsReady(final RTCStatsReport report) {}
+
+ // Helper wait functions.
+ private boolean waitForLocalDescription(int timeoutMs) throws InterruptedException {
+ synchronized (localDescEvent) {
+ final long endTimeMs = System.currentTimeMillis() + timeoutMs;
+ while (localDesc == null) {
+ final long waitTimeMs = endTimeMs - System.currentTimeMillis();
+ if (waitTimeMs < 0) {
+ return false;
+ }
+ localDescEvent.wait(waitTimeMs);
+ }
+ return true;
+ }
+ }
+
+  private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException {
+    synchronized (iceCandidateEvent) {
+      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
+      while (iceCandidates.size() == 0) {
+        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
+        if (waitTimeMs < 0) {
+          return false;
+        }
+        iceCandidateEvent.wait(waitTimeMs);
+      }
+      return true;
+    }
+  }
+
+  private boolean waitForIceConnected(int timeoutMs) throws InterruptedException {
+    synchronized (iceConnectedEvent) {
+      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
+      while (!isIceConnected) {
+        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
+        if (waitTimeMs < 0) {
+          Log.e(TAG, "ICE connection failure");
+          return false;
+        }
+        iceConnectedEvent.wait(waitTimeMs);
+      }
+      return true;
+    }
+  }
+
+  private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException {
+    synchronized (closeEvent) {
+      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
+      while (!isClosed) {
+        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
+        if (waitTimeMs < 0) {
+          return false;
+        }
+        closeEvent.wait(waitTimeMs);
+      }
+      return true;
+    }
+  }
+
+ PeerConnectionClient createPeerConnectionClient(MockSink localRenderer, MockSink remoteRenderer,
+ PeerConnectionParameters peerConnectionParameters, VideoCapturer videoCapturer) {
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ SignalingParameters signalingParameters =
+ new SignalingParameters(iceServers, true, // iceServers, initiator.
+ null, null, null, // clientId, wssUrl, wssPostUrl.
+ null, null); // offerSdp, iceCandidates.
+
+ final EglBase eglBase = EglBase.create();
+ PeerConnectionClient client =
+ new PeerConnectionClient(InstrumentationRegistry.getTargetContext(), eglBase,
+ peerConnectionParameters, this /* PeerConnectionEvents */);
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ options.disableNetworkMonitor = true;
+ client.createPeerConnectionFactory(options);
+ client.createPeerConnection(localRenderer, remoteRenderer, videoCapturer, signalingParameters);
+ client.createOffer();
+ return client;
+ }
+
+ private PeerConnectionParameters createParametersForAudioCall() {
+ return new PeerConnectionParameters(false, /* videoCallEnabled */
+ true, /* loopback */
+ false, /* tracing */
+ // Video codec parameters.
+ 0, /* videoWidth */
+ 0, /* videoHeight */
+ 0, /* videoFps */
+ 0, /* videoStartBitrate */
+ "", /* videoCodec */
+ true, /* videoCodecHwAcceleration */
+ false, /* videoFlexfecEnabled */
+ // Audio codec parameters.
+ 0, /* audioStartBitrate */
+ "OPUS", /* audioCodec */
+ false, /* noAudioProcessing */
+ false, /* aecDump */
+ false, /* saveInputAudioToFile */
+ false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
+ false /* disableBuiltInNS */, false /* disableWebRtcAGC */, false /* enableRtcEventLog */,
+ null /* dataChannelParameters */);
+ }
+
+ private VideoCapturer createCameraCapturer(boolean captureToTexture) {
+ final boolean useCamera2 = captureToTexture
+ && Camera2Enumerator.isSupported(InstrumentationRegistry.getTargetContext());
+
+ CameraEnumerator enumerator;
+ if (useCamera2) {
+ enumerator = new Camera2Enumerator(InstrumentationRegistry.getTargetContext());
+ } else {
+ enumerator = new Camera1Enumerator(captureToTexture);
+ }
+ String deviceName = enumerator.getDeviceNames()[0];
+ return enumerator.createCapturer(deviceName, null);
+ }
+
+ private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
+ return new PeerConnectionParameters(true, /* videoCallEnabled */
+ true, /* loopback */
+ false, /* tracing */
+ // Video codec parameters.
+ 0, /* videoWidth */
+ 0, /* videoHeight */
+ 0, /* videoFps */
+ 0, /* videoStartBitrate */
+ videoCodec, /* videoCodec */
+ true, /* videoCodecHwAcceleration */
+ false, /* videoFlexfecEnabled */
+ // Audio codec parameters.
+ 0, /* audioStartBitrate */
+ "OPUS", /* audioCodec */
+ false, /* noAudioProcessing */
+ false, /* aecDump */
+ false, /* saveInputAudioToFile */
+ false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
+ false /* disableBuiltInNS */, false /* disableWebRtcAGC */, false /* enableRtcEventLog */,
+ null /* dataChannelParameters */);
+ }
+
+ @Before
+ public void setUp() {
+ signalingExecutor = Executors.newSingleThreadExecutor();
+ }
+
+ @After
+ public void tearDown() {
+ signalingExecutor.shutdown();
+ }
+
+ @Test
+ @SmallTest
+ public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException {
+ Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
+ MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
+ pcClient = createPeerConnectionClient(localRenderer,
+ new MockSink(/* expectedFrames= */ 0, /* rendererName= */ null),
+ createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(false /* captureToTexture */));
+
+ // Wait for local description and ice candidates set events.
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT));
+ assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT));
+
+ // Check that local video frames were rendered.
+ assertTrue(
+ "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+
+ pcClient.close();
+ assertTrue(
+ "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT));
+ Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
+ }
+
+ private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
+ boolean decodeToTexture) throws InterruptedException {
+ loopback = true;
+ MockSink localRenderer = null;
+ MockSink remoteRenderer = null;
+ if (parameters.videoCallEnabled) {
+ Log.d(TAG, "testLoopback for video " + parameters.videoCodec);
+ localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
+ remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
+ } else {
+ Log.d(TAG, "testLoopback for audio.");
+ }
+ pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer);
+
+ // Wait for local description, change type to answer and set as remote description.
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT));
+ SessionDescription remoteDescription = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description);
+ pcClient.setRemoteDescription(remoteDescription);
+
+ // Wait for ICE connection.
+ assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
+
+ if (parameters.videoCallEnabled) {
+ // Check that local and remote video frames were rendered.
+ assertTrue("Local video frames were not rendered.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ } else {
+ // For audio just sleep for 1 sec.
+ // TODO(glaznev): check how we can detect that remote audio was rendered.
+ Thread.sleep(AUDIO_RUN_TIMEOUT);
+ }
+
+ pcClient.close();
+ assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
+ Log.d(TAG, "testLoopback done.");
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackAudio() throws InterruptedException {
+ doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackVp8() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackVp9() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
+ createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackH264() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
+ createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackVp9DecodeToTexture() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
+ createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackH264DecodeToTexture() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
+ createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
+ }
+
+ @Test
+ @SmallTest
+ public void testLoopbackH264CaptureToTexture() throws InterruptedException {
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
+ createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
+ }
+
+ // Checks if default front camera can be switched to back camera and then
+ // again to front camera.
+ @Test
+ @SmallTest
+ public void testCameraSwitch() throws InterruptedException {
+ Log.d(TAG, "testCameraSwitch");
+ loopback = true;
+
+ MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
+ MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
+
+ pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
+ createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(false /* captureToTexture */));
+
+ // Wait for local description, set type to answer and set as remote description.
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT));
+ SessionDescription remoteDescription = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description);
+ pcClient.setRemoteDescription(remoteDescription);
+
+ // Wait for ICE connection.
+ assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
+
+ // Check that local and remote video frames were rendered.
+ assertTrue("Local video frames were not rendered before camera switch.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered before camera switch.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+
+ for (int i = 0; i < CAMERA_SWITCH_ATTEMPTS; i++) {
+ // Try to switch camera
+ pcClient.switchCamera();
+
+ // Reset video renders and check that local and remote video frames
+ // were rendered after camera switch.
+ localRenderer.reset(EXPECTED_VIDEO_FRAMES);
+ remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
+ assertTrue("Local video frames were not rendered after camera switch.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered after camera switch.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ }
+ pcClient.close();
+ assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
+ Log.d(TAG, "testCameraSwitch done.");
+ }
+
+ // Checks if video source can be restarted - simulate app goes to
+ // background and back to foreground.
+ @Test
+ @SmallTest
+ public void testVideoSourceRestart() throws InterruptedException {
+ Log.d(TAG, "testVideoSourceRestart");
+ loopback = true;
+
+ MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
+ MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
+
+ pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
+ createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(false /* captureToTexture */));
+
+ // Wait for local description, set type to answer and set as remote description.
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT));
+ SessionDescription remoteDescription = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description);
+ pcClient.setRemoteDescription(remoteDescription);
+
+ // Wait for ICE connection.
+ assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
+
+ // Check that local and remote video frames were rendered.
+ assertTrue("Local video frames were not rendered before video restart.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered before video restart.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+
+ // Stop and then start video source a few times.
+ for (int i = 0; i < VIDEO_RESTART_ATTEMPTS; i++) {
+ pcClient.stopVideoSource();
+ Thread.sleep(VIDEO_RESTART_TIMEOUT);
+ pcClient.startVideoSource();
+
+ // Reset video renders and check that local and remote video frames
+ // were rendered after video restart.
+ localRenderer.reset(EXPECTED_VIDEO_FRAMES);
+ remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
+ assertTrue("Local video frames were not rendered after video restart.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered after video restart.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ }
+ pcClient.close();
+ assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
+ Log.d(TAG, "testVideoSourceRestart done.");
+ }
+
+ // Checks if capture format can be changed on fly and decoder can be reset properly.
+ @Test
+ @SmallTest
+ public void testCaptureFormatChange() throws InterruptedException {
+ Log.d(TAG, "testCaptureFormatChange");
+ loopback = true;
+
+ MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
+ MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
+
+ pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
+ createParametersForVideoCall(VIDEO_CODEC_VP8),
+ createCameraCapturer(false /* captureToTexture */));
+
+ // Wait for local description, set type to answer and set as remote description.
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT));
+ SessionDescription remoteDescription = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description);
+ pcClient.setRemoteDescription(remoteDescription);
+
+ // Wait for ICE connection.
+ assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
+
+ // Check that local and remote video frames were rendered.
+ assertTrue("Local video frames were not rendered before camera resolution change.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered before camera resolution change.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+
+ // Change capture output format a few times.
+ for (int i = 0; i < 2 * CAPTURE_FORMAT_CHANGE_ATTEMPTS; i++) {
+ if (i % 2 == 0) {
+ pcClient.changeCaptureFormat(WIDTH_VGA, HEIGHT_VGA, MAX_VIDEO_FPS);
+ } else {
+ pcClient.changeCaptureFormat(WIDTH_QVGA, HEIGHT_QVGA, MAX_VIDEO_FPS);
+ }
+
+ // Reset video renders and check that local and remote video frames
+ // were rendered after capture format change.
+ localRenderer.reset(EXPECTED_VIDEO_FRAMES);
+ remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
+ assertTrue("Local video frames were not rendered after capture format change.",
+ localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ assertTrue("Remote video frames were not rendered after capture format change.",
+ remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+ }
+
+ pcClient.close();
+ assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
+ Log.d(TAG, "testCaptureFormatChange done.");
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidtests/third_party/.gitignore b/third_party/libwebrtc/examples/androidtests/third_party/.gitignore
new file mode 100644
index 0000000000..52acefb2ec
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/third_party/.gitignore
@@ -0,0 +1,3 @@
+# This file is needed for projects that have this directory as a separate Git
+# mirror in DEPS. Without it, a lot is wiped and re-downloaded for each sync.
+/gradle
diff --git a/third_party/libwebrtc/examples/androidtests/third_party/README.webrtc b/third_party/libwebrtc/examples/androidtests/third_party/README.webrtc
new file mode 100644
index 0000000000..a6ea884923
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidtests/third_party/README.webrtc
@@ -0,0 +1,10 @@
+The third_party directory contains sources from other projects.
+
+Code in third_party must document the license under which the source is being
+used. If the source itself does not include a license header or file, create
+an entry in this file that refers to reliable documentation of the project's
+license terms on the web (and add a note pointing here in the README file in
+that directory).
+
+<Include table of license information here, once it is available>
+
diff --git a/third_party/libwebrtc/examples/androidvoip/AndroidManifest.xml b/third_party/libwebrtc/examples/androidvoip/AndroidManifest.xml
new file mode 100644
index 0000000000..8e096b0452
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/AndroidManifest.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.webrtc.examples.androidvoip">
+
+
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+
+ <uses-feature android:name="android.hardware.microphone" android:required="true" />
+ <uses-feature android:name="android.hardware.telephony" android:required="false" />
+
+ <application
+ android:allowBackup="true"
+ android:label="@string/app_name"
+ android:supportsRtl="true">
+ <activity android:name=".MainActivity"
+ android:windowSoftInputMode="stateHidden"
+ android:exported="true">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ </application>
+
+</manifest>
diff --git a/third_party/libwebrtc/examples/androidvoip/BUILD.gn b/third_party/libwebrtc/examples/androidvoip/BUILD.gn
new file mode 100644
index 0000000000..cea05ea128
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/BUILD.gn
@@ -0,0 +1,95 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//webrtc.gni")
+
+if (is_android) {
+ import("//third_party/jni_zero/jni_zero.gni")
+ rtc_android_apk("androidvoip") {
+ testonly = true
+ apk_name = "androidvoip"
+ android_manifest = "AndroidManifest.xml"
+ min_sdk_version = 21
+ target_sdk_version = 31
+
+ sources = [
+ "java/org/webrtc/examples/androidvoip/MainActivity.java",
+ "java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java",
+ "java/org/webrtc/examples/androidvoip/VoipClient.java",
+ ]
+
+ deps = [
+ ":resources",
+ "//rtc_base:base_java",
+ "//sdk/android:base_java",
+ "//sdk/android:java_audio_device_module_java",
+ "//sdk/android:video_java",
+ "//third_party/androidx:androidx_core_core_java",
+ "//third_party/androidx:androidx_legacy_legacy_support_v4_java",
+ ]
+
+ shared_libraries = [ ":examples_androidvoip_jni" ]
+ }
+
+ generate_jni("generated_jni") {
+ testonly = true
+ sources = [ "java/org/webrtc/examples/androidvoip/VoipClient.java" ]
+ namespace = "webrtc_examples"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ rtc_shared_library("examples_androidvoip_jni") {
+ testonly = true
+ sources = [
+ "jni/android_voip_client.cc",
+ "jni/android_voip_client.h",
+ "jni/onload.cc",
+ ]
+
+ suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
+ configs += [ "//build/config/android:hide_all_but_jni" ]
+
+ deps = [
+ ":generated_jni",
+ "../../rtc_base:async_packet_socket",
+ "../../rtc_base:async_udp_socket",
+ "../../rtc_base:logging",
+ "../../rtc_base:network",
+ "../../rtc_base:socket_address",
+ "../../rtc_base:socket_server",
+ "../../rtc_base:ssl",
+ "../../rtc_base:threading",
+ "//api:transport_api",
+ "//api/audio_codecs:audio_codecs_api",
+ "//api/audio_codecs:builtin_audio_decoder_factory",
+ "//api/audio_codecs:builtin_audio_encoder_factory",
+ "//api/task_queue:default_task_queue_factory",
+ "//api/voip:voip_api",
+ "//api/voip:voip_engine_factory",
+ "//rtc_base/third_party/sigslot:sigslot",
+ "//sdk/android:native_api_audio_device_module",
+ "//sdk/android:native_api_base",
+ "//sdk/android:native_api_jni",
+ "//third_party/abseil-cpp/absl/memory:memory",
+ ]
+ }
+
+ android_resources("resources") {
+ testonly = true
+ custom_package = "org.webrtc.examples.androidvoip"
+ sources = [
+ "res/layout/activity_main.xml",
+ "res/values/colors.xml",
+ "res/values/strings.xml",
+ ]
+
+ # Needed for Bazel converter.
+ resource_dirs = [ "res" ]
+ assert(resource_dirs != []) # Mark as used.
+ }
+}
diff --git a/third_party/libwebrtc/examples/androidvoip/DEPS b/third_party/libwebrtc/examples/androidvoip/DEPS
new file mode 100644
index 0000000000..edb714dd44
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+sdk/android/native_api",
+]
diff --git a/third_party/libwebrtc/examples/androidvoip/OWNERS b/third_party/libwebrtc/examples/androidvoip/OWNERS
new file mode 100644
index 0000000000..e7d3200562
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/OWNERS
@@ -0,0 +1,2 @@
+natim@webrtc.org
+xalep@webrtc.org
diff --git a/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java b/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java
new file mode 100644
index 0000000000..d06d6adf0d
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java
@@ -0,0 +1,341 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidvoip;
+
+import android.Manifest.permission;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.os.Bundle;
+import android.view.Gravity;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.EditText;
+import android.widget.RelativeLayout;
+import android.widget.ScrollView;
+import android.widget.Spinner;
+import android.widget.Switch;
+import android.widget.TextView;
+import android.widget.Toast;
+import android.widget.ToggleButton;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.webrtc.ContextUtils;
+
+/**
+ * Demo UI for the androidvoip example. Lets the user pick local/remote
+ * RTP addresses, choose one encoder and any set of decoders, and toggle a
+ * VoIP session plus its send and playout directions via {@link VoipClient}.
+ *
+ * <p>Implements {@link OnVoipClientTaskCompleted} to receive asynchronous
+ * results from the native layer; every callback re-posts its work onto the
+ * UI thread with {@code runOnUiThread} before touching any views.
+ */
+public class MainActivity extends Activity implements OnVoipClientTaskCompleted {
+  // Number of built-in codecs handled by the native layer; sizes the
+  // checked-state array backing the decoder multi-choice dialog.
+  private static final int NUM_SUPPORTED_CODECS = 6;
+
+  private VoipClient voipClient;
+  // Codec names reported asynchronously by the native layer; assigned in
+  // onGetSupportedCodecsCompleted before the spinner/dialog are populated.
+  private List<String> supportedCodecs;
+  // Checked state for the decoder selection dialog, index-aligned with
+  // supportedCodecs.
+  private boolean[] isDecoderSelected;
+  // Indices (into supportedCodecs) of the currently selected decoders.
+  private Set<Integer> selectedDecoders;
+
+  private Toast toast;
+  private ScrollView scrollView;
+  private TextView localIPAddressTextView;
+  private EditText localPortNumberEditText;
+  private EditText remoteIPAddressEditText;
+  private EditText remotePortNumberEditText;
+  private Spinner encoderSpinner;
+  private Button decoderSelectionButton;
+  private TextView decodersTextView;
+  private ToggleButton sessionButton;
+  private RelativeLayout switchLayout;
+  private Switch sendSwitch;
+  private Switch playoutSwitch;
+
+  @Override
+  protected void onCreate(Bundle savedInstance) {
+    ContextUtils.initialize(getApplicationContext());
+
+    super.onCreate(savedInstance);
+    setContentView(R.layout.activity_main);
+
+    // The JNI library must be loaded before constructing VoipClient below,
+    // which immediately calls into native code.
+    System.loadLibrary("examples_androidvoip_jni");
+
+    voipClient = new VoipClient(getApplicationContext(), this);
+    // Results arrive later via onGetLocalIPAddressCompleted /
+    // onGetSupportedCodecsCompleted.
+    voipClient.getAndSetUpLocalIPAddress();
+    voipClient.getAndSetUpSupportedCodecs();
+
+    isDecoderSelected = new boolean[NUM_SUPPORTED_CODECS];
+    selectedDecoders = new HashSet<>();
+
+    toast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
+
+    scrollView = (ScrollView) findViewById(R.id.scroll_view);
+    localIPAddressTextView = (TextView) findViewById(R.id.local_ip_address_text_view);
+    localPortNumberEditText = (EditText) findViewById(R.id.local_port_number_edit_text);
+    remoteIPAddressEditText = (EditText) findViewById(R.id.remote_ip_address_edit_text);
+    remotePortNumberEditText = (EditText) findViewById(R.id.remote_port_number_edit_text);
+    encoderSpinner = (Spinner) findViewById(R.id.encoder_spinner);
+    decoderSelectionButton = (Button) findViewById(R.id.decoder_selection_button);
+    decodersTextView = (TextView) findViewById(R.id.decoders_text_view);
+    sessionButton = (ToggleButton) findViewById(R.id.session_button);
+    switchLayout = (RelativeLayout) findViewById(R.id.switch_layout);
+    sendSwitch = (Switch) findViewById(R.id.start_send_switch);
+    playoutSwitch = (Switch) findViewById(R.id.start_playout_switch);
+
+    setUpSessionButton();
+    setUpSendAndPlayoutSwitch();
+  }
+
+  // Fills the encoder spinner with the supported codec names and forwards
+  // each selection to the native layer.
+  private void setUpEncoderSpinner(List<String> supportedCodecs) {
+    ArrayAdapter<String> encoderAdapter =
+        new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, supportedCodecs);
+    encoderAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+    encoderSpinner.setAdapter(encoderAdapter);
+    encoderSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+      @Override
+      public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
+        voipClient.setEncoder((String) parent.getSelectedItem());
+      }
+      @Override
+      public void onNothingSelected(AdapterView<?> parent) {}
+    });
+  }
+
+  // Maps the selected indices back to codec names, preserving the order of
+  // supportedCodecs.
+  private List<String> getSelectedDecoders() {
+    List<String> decoders = new ArrayList<>();
+    for (int i = 0; i < supportedCodecs.size(); i++) {
+      if (selectedDecoders.contains(i)) {
+        decoders.add(supportedCodecs.get(i));
+      }
+    }
+    return decoders;
+  }
+
+  // Wires the decoder button to a multi-choice dialog; "Ok" commits the
+  // selection to the native layer, "Clear All" resets it.
+  private void setUpDecoderSelectionButton(List<String> supportedCodecs) {
+    decoderSelectionButton.setOnClickListener((view) -> {
+      AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this);
+      dialogBuilder.setTitle(R.string.dialog_title);
+
+      // Populate multi choice items with supported decoders.
+      String[] supportedCodecsArray = supportedCodecs.toArray(new String[0]);
+      dialogBuilder.setMultiChoiceItems(
+          supportedCodecsArray, isDecoderSelected, (dialog, position, isChecked) -> {
+            if (isChecked) {
+              selectedDecoders.add(position);
+            } else if (!isChecked) {
+              selectedDecoders.remove(position);
+            }
+          });
+
+      // "Ok" button.
+      dialogBuilder.setPositiveButton(R.string.ok_label, (dialog, position) -> {
+        List<String> decoders = getSelectedDecoders();
+        String result = decoders.stream().collect(Collectors.joining(", "));
+        if (result.isEmpty()) {
+          decodersTextView.setText(R.string.decoders_text_view_default);
+        } else {
+          decodersTextView.setText(result);
+        }
+        voipClient.setDecoders(decoders);
+      });
+
+      // "Dismiss" button.
+      dialogBuilder.setNegativeButton(
+          R.string.dismiss_label, (dialog, position) -> { dialog.dismiss(); });
+
+      // "Clear All" button.
+      dialogBuilder.setNeutralButton(R.string.clear_all_label, (dialog, position) -> {
+        Arrays.fill(isDecoderSelected, false);
+        selectedDecoders.clear();
+        decodersTextView.setText(R.string.decoders_text_view_default);
+      });
+
+      AlertDialog dialog = dialogBuilder.create();
+      dialog.show();
+    });
+  }
+
+  // Toggling the session button on starts a session with the addresses
+  // currently in the text fields; toggling it off stops the session.
+  private void setUpSessionButton() {
+    sessionButton.setOnCheckedChangeListener((button, isChecked) -> {
+      // Ask for permission on RECORD_AUDIO if not granted.
+      if (ContextCompat.checkSelfPermission(this, permission.RECORD_AUDIO)
+          != PackageManager.PERMISSION_GRANTED) {
+        String[] sList = {permission.RECORD_AUDIO};
+        ActivityCompat.requestPermissions(this, sList, 1);
+      }
+
+      if (isChecked) {
+        // Order matters here: local/remote addresses must be set before the
+        // session is started, and the encoder/decoders can only be applied
+        // after the session has started.
+        voipClient.setLocalAddress(localIPAddressTextView.getText().toString(),
+            Integer.parseInt(localPortNumberEditText.getText().toString()));
+        voipClient.setRemoteAddress(remoteIPAddressEditText.getText().toString(),
+            Integer.parseInt(remotePortNumberEditText.getText().toString()));
+        voipClient.startSession();
+        voipClient.setEncoder((String) encoderSpinner.getSelectedItem());
+        voipClient.setDecoders(getSelectedDecoders());
+      } else {
+        voipClient.stopSession();
+      }
+    });
+  }
+
+  // Send and playout switches independently toggle the two media directions
+  // of an active session.
+  private void setUpSendAndPlayoutSwitch() {
+    sendSwitch.setOnCheckedChangeListener((button, isChecked) -> {
+      if (isChecked) {
+        voipClient.startSend();
+      } else {
+        voipClient.stopSend();
+      }
+    });
+
+    playoutSwitch.setOnCheckedChangeListener((button, isChecked) -> {
+      if (isChecked) {
+        voipClient.startPlayout();
+      } else {
+        voipClient.stopPlayout();
+      }
+    });
+  }
+
+  // Shows the queried local IP address, or a toast if none was found
+  // (empty string means the query failed).
+  private void setUpIPAddressEditTexts(String localIPAddress) {
+    if (localIPAddress.isEmpty()) {
+      showToast("Please check your network configuration");
+    } else {
+      localIPAddressTextView.setText(localIPAddress);
+      // By default remote IP address is the same as local IP address.
+      remoteIPAddressEditText.setText(localIPAddress);
+    }
+  }
+
+  // Cancels any toast currently showing so messages never queue up.
+  private void showToast(String message) {
+    if (toast != null) {
+      toast.cancel();
+      toast = Toast.makeText(this, message, Toast.LENGTH_SHORT);
+      toast.setGravity(Gravity.TOP, 0, 200);
+      toast.show();
+    }
+  }
+
+  @Override
+  protected void onDestroy() {
+    // Release the native client; VoipClient.close() zeroes its handle.
+    voipClient.close();
+    voipClient = null;
+
+    super.onDestroy();
+  }
+
+  @Override
+  public void onGetLocalIPAddressCompleted(String localIPAddress) {
+    runOnUiThread(() -> { setUpIPAddressEditTexts(localIPAddress); });
+  }
+
+  @Override
+  public void onGetSupportedCodecsCompleted(List<String> supportedCodecs) {
+    runOnUiThread(() -> {
+      this.supportedCodecs = supportedCodecs;
+      setUpEncoderSpinner(supportedCodecs);
+      setUpDecoderSelectionButton(supportedCodecs);
+    });
+  }
+
+  @Override
+  public void onVoipClientInitializationCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (!isSuccessful) {
+        showToast("Error initializing audio device");
+      }
+    });
+  }
+
+  @Override
+  public void onStartSessionCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (isSuccessful) {
+        showToast("Session started");
+        // Reveal the send/playout switches and scroll them into view.
+        switchLayout.setVisibility(View.VISIBLE);
+        scrollView.post(() -> { scrollView.fullScroll(ScrollView.FOCUS_DOWN); });
+      } else {
+        showToast("Failed to start session");
+      }
+    });
+  }
+
+  @Override
+  public void onStopSessionCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (isSuccessful) {
+        showToast("Session stopped");
+        // Set listeners to null so the checked state can be changed programmatically.
+        sendSwitch.setOnCheckedChangeListener(null);
+        playoutSwitch.setOnCheckedChangeListener(null);
+        sendSwitch.setChecked(false);
+        playoutSwitch.setChecked(false);
+        // Redo the switch listener setup.
+        setUpSendAndPlayoutSwitch();
+        switchLayout.setVisibility(View.GONE);
+      } else {
+        showToast("Failed to stop session");
+      }
+    });
+  }
+
+  @Override
+  public void onStartSendCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (isSuccessful) {
+        showToast("Started sending");
+      } else {
+        showToast("Error initializing microphone");
+      }
+    });
+  }
+
+  @Override
+  public void onStopSendCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (isSuccessful) {
+        showToast("Stopped sending");
+      } else {
+        showToast("Microphone termination failed");
+      }
+    });
+  }
+
+  @Override
+  public void onStartPlayoutCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (isSuccessful) {
+        showToast("Started playout");
+      } else {
+        showToast("Error initializing speaker");
+      }
+    });
+  }
+
+  @Override
+  public void onStopPlayoutCompleted(boolean isSuccessful) {
+    runOnUiThread(() -> {
+      if (isSuccessful) {
+        showToast("Stopped playout");
+      } else {
+        showToast("Speaker termination failed");
+      }
+    });
+  }
+
+  @Override
+  public void onUninitializedVoipClient() {
+    runOnUiThread(() -> { showToast("Voip client is uninitialized"); });
+  }
+}
diff --git a/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java b/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java
new file mode 100644
index 0000000000..bb85e048bb
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidvoip;
+
+import java.util.List;
+
+/**
+ * Callback interface through which {@link VoipClient} reports the results of
+ * asynchronous VoIP operations. Callbacks are not guaranteed to arrive on the
+ * UI thread; the implementation in MainActivity re-posts each one via
+ * {@code runOnUiThread} before touching views.
+ */
+public interface OnVoipClientTaskCompleted {
+  // Delivers the queried local IP address; empty string if none was found.
+  void onGetLocalIPAddressCompleted(String localIPAddress);
+  // Delivers the codec names supported by the native encoder factory.
+  void onGetSupportedCodecsCompleted(List<String> supportedCodecs);
+  void onVoipClientInitializationCompleted(boolean isSuccessful);
+  void onStartSessionCompleted(boolean isSuccessful);
+  void onStopSessionCompleted(boolean isSuccessful);
+  void onStartSendCompleted(boolean isSuccessful);
+  void onStopSendCompleted(boolean isSuccessful);
+  void onStartPlayoutCompleted(boolean isSuccessful);
+  void onStopPlayoutCompleted(boolean isSuccessful);
+  // Invoked when an operation is attempted after close() or when native
+  // client creation failed (native handle is 0).
+  void onUninitializedVoipClient();
+}
diff --git a/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java b/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java
new file mode 100644
index 0000000000..69a993d344
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.examples.androidvoip;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.HandlerThread;
+import java.util.ArrayList;
+import java.util.List;
+import org.webrtc.CalledByNative;
+
+/**
+ * Thin Java wrapper around the native webrtc_examples::AndroidVoipClient.
+ * Each public method forwards to the corresponding native call when the
+ * native client exists, and otherwise reports the error through
+ * {@link OnVoipClientTaskCompleted#onUninitializedVoipClient()}. Results of
+ * the native operations come back asynchronously through the
+ * {@code @CalledByNative} methods below, which relay them to the listener.
+ */
+public class VoipClient {
+  // Raw pointer to the native AndroidVoipClient; 0 after close() or if
+  // native creation failed.
+  private long nativeClient;
+  private OnVoipClientTaskCompleted listener;
+
+  public VoipClient(Context applicationContext, OnVoipClientTaskCompleted listener) {
+    this.listener = listener;
+    nativeClient = nativeCreateClient(applicationContext, this);
+  }
+
+  // True while the native client handle is valid.
+  private boolean isInitialized() {
+    return nativeClient != 0;
+  }
+
+  // Asks the native layer for the supported codec list; the result arrives
+  // via onGetSupportedCodecsCompleted.
+  public void getAndSetUpSupportedCodecs() {
+    if (isInitialized()) {
+      nativeGetSupportedCodecs(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  // Asks the native layer for the default local IP address; the result
+  // arrives via onGetLocalIPAddressCompleted.
+  public void getAndSetUpLocalIPAddress() {
+    if (isInitialized()) {
+      nativeGetLocalIPAddress(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void setEncoder(String encoder) {
+    if (isInitialized()) {
+      nativeSetEncoder(nativeClient, encoder);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void setDecoders(List<String> decoders) {
+    if (isInitialized()) {
+      nativeSetDecoders(nativeClient, decoders);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void setLocalAddress(String ipAddress, int portNumber) {
+    if (isInitialized()) {
+      nativeSetLocalAddress(nativeClient, ipAddress, portNumber);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void setRemoteAddress(String ipAddress, int portNumber) {
+    if (isInitialized()) {
+      nativeSetRemoteAddress(nativeClient, ipAddress, portNumber);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void startSession() {
+    if (isInitialized()) {
+      nativeStartSession(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void stopSession() {
+    if (isInitialized()) {
+      nativeStopSession(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void startSend() {
+    if (isInitialized()) {
+      nativeStartSend(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void stopSend() {
+    if (isInitialized()) {
+      nativeStopSend(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void startPlayout() {
+    if (isInitialized()) {
+      nativeStartPlayout(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  public void stopPlayout() {
+    if (isInitialized()) {
+      nativeStopPlayout(nativeClient);
+    } else {
+      listener.onUninitializedVoipClient();
+    }
+  }
+
+  // Frees the native client and invalidates the handle. Further calls on
+  // this object report onUninitializedVoipClient().
+  public void close() {
+    nativeDelete(nativeClient);
+    nativeClient = 0;
+  }
+
+  // ---- Callbacks from native code (see android_voip_client.cc); each one
+  // simply relays the result to the registered listener. ----
+
+  @CalledByNative
+  public void onGetLocalIPAddressCompleted(String localIPAddress) {
+    listener.onGetLocalIPAddressCompleted(localIPAddress);
+  }
+
+  @CalledByNative
+  public void onGetSupportedCodecsCompleted(List<String> supportedCodecs) {
+    listener.onGetSupportedCodecsCompleted(supportedCodecs);
+  }
+
+  @CalledByNative
+  public void onStartSessionCompleted(boolean isSuccessful) {
+    listener.onStartSessionCompleted(isSuccessful);
+  }
+
+  @CalledByNative
+  public void onStopSessionCompleted(boolean isSuccessful) {
+    listener.onStopSessionCompleted(isSuccessful);
+  }
+
+  @CalledByNative
+  public void onStartSendCompleted(boolean isSuccessful) {
+    listener.onStartSendCompleted(isSuccessful);
+  }
+
+  @CalledByNative
+  public void onStopSendCompleted(boolean isSuccessful) {
+    listener.onStopSendCompleted(isSuccessful);
+  }
+
+  @CalledByNative
+  public void onStartPlayoutCompleted(boolean isSuccessful) {
+    listener.onStartPlayoutCompleted(isSuccessful);
+  }
+
+  @CalledByNative
+  public void onStopPlayoutCompleted(boolean isSuccessful) {
+    listener.onStopPlayoutCompleted(isSuccessful);
+  }
+
+  // ---- JNI bindings generated into VoipClient_jni.h; the first long
+  // parameter is the native AndroidVoipClient pointer. ----
+
+  private static native long nativeCreateClient(
+      Context applicationContext, VoipClient javaVoipClient);
+  private static native void nativeGetSupportedCodecs(long nativeAndroidVoipClient);
+  private static native void nativeGetLocalIPAddress(long nativeAndroidVoipClient);
+  private static native void nativeSetEncoder(long nativeAndroidVoipClient, String encoder);
+  private static native void nativeSetDecoders(long nativeAndroidVoipClient, List<String> decoders);
+  private static native void nativeSetLocalAddress(
+      long nativeAndroidVoipClient, String ipAddress, int portNumber);
+  private static native void nativeSetRemoteAddress(
+      long nativeAndroidVoipClient, String ipAddress, int portNumber);
+  private static native void nativeStartSession(long nativeAndroidVoipClient);
+  private static native void nativeStopSession(long nativeAndroidVoipClient);
+  private static native void nativeStartSend(long nativeAndroidVoipClient);
+  private static native void nativeStopSend(long nativeAndroidVoipClient);
+  private static native void nativeStartPlayout(long nativeAndroidVoipClient);
+  private static native void nativeStopPlayout(long nativeAndroidVoipClient);
+  private static native void nativeDelete(long nativeAndroidVoipClient);
+}
diff --git a/third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.cc b/third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.cc
new file mode 100644
index 0000000000..8a0a3badb9
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.cc
@@ -0,0 +1,514 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/androidvoip/jni/android_voip_client.h"
+
+#include <errno.h>
+#include <sys/socket.h>
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <unordered_map>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/voip/voip_codec.h"
+#include "api/voip/voip_engine_factory.h"
+#include "api/voip/voip_network.h"
+#include "examples/androidvoip/generated_jni/VoipClient_jni.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network.h"
+#include "rtc_base/socket_server.h"
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/jni/jvm.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace {
+
+// Marshals a method call onto voip_thread_: if invoked from any other
+// thread, re-posts the same call (with the same arguments) to voip_thread_
+// and returns immediately; otherwise asserts we are on voip_thread_ and
+// falls through into the method body. NOTE(review): `this` is captured raw
+// by the posted task — presumably the client outlives all pending tasks;
+// confirm against the destructor's Stop() ordering.
+#define RUN_ON_VOIP_THREAD(method, ...)                              \
+  if (!voip_thread_->IsCurrent()) {                                  \
+    voip_thread_->PostTask(                                          \
+        std::bind(&AndroidVoipClient::method, this, ##__VA_ARGS__)); \
+    return;                                                          \
+  }                                                                  \
+  RTC_DCHECK_RUN_ON(voip_thread_.get());
+
+// Connects a UDP socket to a public address and returns the local
+// address associated with it. Since it binds to the "any" address
+// internally, it returns the default local address on a multi-homed
+// endpoint. Implementation copied from
+// BasicNetworkManager::QueryDefaultLocalAddress.
+// Returns the nil rtc::IPAddress() on any failure (socket creation error
+// or unreachable network); callers test the result with IsNil().
+rtc::IPAddress QueryDefaultLocalAddress(int family) {
+  const char kPublicIPv4Host[] = "8.8.8.8";
+  const char kPublicIPv6Host[] = "2001:4860:4860::8888";
+  const int kPublicPort = 53;
+  std::unique_ptr<rtc::Thread> thread = rtc::Thread::CreateWithSocketServer();
+
+  RTC_DCHECK(thread->socketserver() != nullptr);
+  RTC_DCHECK(family == AF_INET || family == AF_INET6);
+
+  std::unique_ptr<rtc::Socket> socket(
+      thread->socketserver()->CreateSocket(family, SOCK_DGRAM));
+  if (!socket) {
+    RTC_LOG_ERR(LS_ERROR) << "Socket creation failed";
+    return rtc::IPAddress();
+  }
+
+  auto host = family == AF_INET ? kPublicIPv4Host : kPublicIPv6Host;
+  if (socket->Connect(rtc::SocketAddress(host, kPublicPort)) < 0) {
+    // Unreachable-network errors are expected (e.g. no IPv6 route), so they
+    // are not logged; anything else is noted at INFO level.
+    if (socket->GetError() != ENETUNREACH &&
+        socket->GetError() != EHOSTUNREACH) {
+      RTC_LOG(LS_INFO) << "Connect failed with " << socket->GetError();
+    }
+    return rtc::IPAddress();
+  }
+  // No packet is actually sent: connecting a UDP socket only selects the
+  // route, which is enough to learn the default local address.
+  return socket->GetLocalAddress().ipaddr();
+}
+
+// Assigned payload type for supported built-in codecs. PCMU, PCMA,
+// and G722 have set payload types. Whereas opus, ISAC, and ILBC
+// have dynamic payload types.
+enum class PayloadType : int {
+  // Static RTP payload type assignments.
+  kPcmu = 0,
+  kPcma = 8,
+  kG722 = 9,
+  // Dynamically assigned payload types chosen by this example.
+  kOpus = 96,
+  kIsac = 97,
+  kIlbc = 98,
+};
+
+// Returns the payload type corresponding to codec_name. Only
+// supports the built-in codecs.
+// Returns the PayloadType value for `codec_name` as an int, or -1 (after a
+// DCHECK failure) for an unknown name. Names are matched case-sensitively,
+// exactly as produced by the builtin audio encoder factory.
+int GetPayloadType(const std::string& codec_name) {
+  RTC_DCHECK(codec_name == "PCMU" || codec_name == "PCMA" ||
+             codec_name == "G722" || codec_name == "opus" ||
+             codec_name == "ISAC" || codec_name == "ILBC");
+
+  if (codec_name == "PCMU") {
+    return static_cast<int>(PayloadType::kPcmu);
+  } else if (codec_name == "PCMA") {
+    return static_cast<int>(PayloadType::kPcma);
+  } else if (codec_name == "G722") {
+    return static_cast<int>(PayloadType::kG722);
+  } else if (codec_name == "opus") {
+    return static_cast<int>(PayloadType::kOpus);
+  } else if (codec_name == "ISAC") {
+    return static_cast<int>(PayloadType::kIsac);
+  } else if (codec_name == "ILBC") {
+    return static_cast<int>(PayloadType::kIlbc);
+  }
+
+  RTC_DCHECK_NOTREACHED();
+  return -1;
+}
+
+} // namespace
+
+namespace webrtc_examples {
+
+// Builds the VoipEngine configuration (builtin codec factories, default task
+// queue factory, Java audio device module) and creates the engine on
+// voip_thread_. Called once by Create() right after construction.
+void AndroidVoipClient::Init(
+    JNIEnv* env,
+    const webrtc::JavaParamRef<jobject>& application_context) {
+  webrtc::VoipEngineConfig config;
+  config.encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
+  config.decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
+  config.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+  config.audio_device_module =
+      webrtc::CreateJavaAudioDeviceModule(env, application_context.obj());
+  config.audio_processing = webrtc::AudioProcessingBuilder().Create();
+
+  voip_thread_->Start();
+
+  // Due to consistent thread requirement on
+  // modules/audio_device/android/audio_device_template.h,
+  // code is invoked in the context of voip_thread_.
+  voip_thread_->BlockingCall([this, &config] {
+    RTC_DCHECK_RUN_ON(voip_thread_.get());
+
+    // Cache the codec list before `config` is consumed by std::move below.
+    supported_codecs_ = config.encoder_factory->GetSupportedEncoders();
+    // env_ belongs to voip_thread_ and is reused by all later JNI callbacks.
+    env_ = webrtc::AttachCurrentThreadIfNeeded();
+    voip_engine_ = webrtc::CreateVoipEngine(std::move(config));
+  });
+}
+
+// Detaches voip_thread_ from the JVM (it was attached in Init() via
+// AttachCurrentThreadIfNeeded) and then stops the thread.
+AndroidVoipClient::~AndroidVoipClient() {
+  voip_thread_->BlockingCall([this] {
+    RTC_DCHECK_RUN_ON(voip_thread_.get());
+
+    JavaVM* jvm = nullptr;
+    env_->GetJavaVM(&jvm);
+    if (!jvm) {
+      RTC_LOG(LS_ERROR) << "Failed to retrieve JVM";
+      return;
+    }
+    jint res = jvm->DetachCurrentThread();
+    if (res != JNI_OK) {
+      RTC_LOG(LS_ERROR) << "DetachCurrentThread failed: " << res;
+    }
+  });
+
+  voip_thread_->Stop();
+}
+
+// Factory used by the JNI entry point. Returns a fully initialized client;
+// ownership transfers to the caller (the Java VoipClient stores the raw
+// pointer and releases it through its nativeDelete binding).
+AndroidVoipClient* AndroidVoipClient::Create(
+    JNIEnv* env,
+    const webrtc::JavaParamRef<jobject>& application_context,
+    const webrtc::JavaParamRef<jobject>& j_voip_client) {
+  // Using `new` to access a non-public constructor.
+  auto voip_client =
+      absl::WrapUnique(new AndroidVoipClient(env, j_voip_client));
+  voip_client->Init(env, application_context);
+  return voip_client.release();
+}
+
+// Reports the cached supported-codec names back to Java via the generated
+// onGetSupportedCodecsCompleted callback. Always runs on voip_thread_
+// (hence env_ rather than the caller-supplied env is used for JNI).
+void AndroidVoipClient::GetSupportedCodecs(JNIEnv* env) {
+  RUN_ON_VOIP_THREAD(GetSupportedCodecs, env);
+
+  std::vector<std::string> names;
+  for (const webrtc::AudioCodecSpec& spec : supported_codecs_) {
+    names.push_back(spec.format.name);
+  }
+  // Explicit function-pointer type selects the std::string overload of
+  // NativeToJavaString for NativeToJavaList's converter argument.
+  webrtc::ScopedJavaLocalRef<jstring> (*convert_function)(
+      JNIEnv*, const std::string&) = &webrtc::NativeToJavaString;
+  Java_VoipClient_onGetSupportedCodecsCompleted(
+      env_, j_voip_client_, NativeToJavaList(env_, names, convert_function));
+}
+
+// Queries the default local address (IPv4 first, falling back to IPv6) and
+// reports it to Java; an empty string signals that no address was found.
+void AndroidVoipClient::GetLocalIPAddress(JNIEnv* env) {
+  RUN_ON_VOIP_THREAD(GetLocalIPAddress, env);
+
+  std::string local_ip_address;
+  rtc::IPAddress ipv4_address = QueryDefaultLocalAddress(AF_INET);
+  if (!ipv4_address.IsNil()) {
+    local_ip_address = ipv4_address.ToString();
+  } else {
+    rtc::IPAddress ipv6_address = QueryDefaultLocalAddress(AF_INET6);
+    if (!ipv6_address.IsNil()) {
+      local_ip_address = ipv6_address.ToString();
+    }
+  }
+  Java_VoipClient_onGetLocalIPAddressCompleted(
+      env_, j_voip_client_, webrtc::NativeToJavaString(env_, local_ip_address));
+}
+
// Applies `encoder` as the send codec on the current channel. Silently does
// nothing (beyond an error log) if the session has not been started; an
// unknown codec name is ignored.
void AndroidVoipClient::SetEncoder(const std::string& encoder) {
  RTC_DCHECK_RUN_ON(voip_thread_.get());

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    return;
  }
  // Match against the codecs discovered at Init() time; the first (only)
  // match wins.
  for (const webrtc::AudioCodecSpec& codec : supported_codecs_) {
    if (codec.format.name == encoder) {
      webrtc::VoipResult result = voip_engine_->Codec().SetSendCodec(
          *channel_, GetPayloadType(codec.format.name), codec.format);
      RTC_CHECK(result == webrtc::VoipResult::kOk);
      return;
    }
  }
}
+
+void AndroidVoipClient::SetEncoder(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jstring>& j_encoder_string) {
+ const std::string& chosen_encoder =
+ webrtc::JavaToNativeString(env, j_encoder_string);
+ voip_thread_->PostTask(
+ [this, chosen_encoder] { SetEncoder(chosen_encoder); });
+}
+
// Installs the receive codecs whose names appear in `decoders`, mapping each
// to its fixed payload type. Requires an active session (channel).
void AndroidVoipClient::SetDecoders(const std::vector<std::string>& decoders) {
  RTC_DCHECK_RUN_ON(voip_thread_.get());

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    return;
  }
  // Build payload-type -> format map for every supported codec the caller
  // selected; names not in supported_codecs_ are silently dropped.
  std::map<int, webrtc::SdpAudioFormat> decoder_specs;
  for (const webrtc::AudioCodecSpec& codec : supported_codecs_) {
    if (std::find(decoders.begin(), decoders.end(), codec.format.name) !=
        decoders.end()) {
      decoder_specs.insert({GetPayloadType(codec.format.name), codec.format});
    }
  }

  webrtc::VoipResult result =
      voip_engine_->Codec().SetReceiveCodecs(*channel_, decoder_specs);
  RTC_CHECK(result == webrtc::VoipResult::kOk);
}
+
+void AndroidVoipClient::SetDecoders(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jobject>& j_decoder_strings) {
+ const std::vector<std::string>& chosen_decoders =
+ webrtc::JavaListToNativeVector<std::string, jstring>(
+ env, j_decoder_strings, &webrtc::JavaToNativeString);
+ voip_thread_->PostTask(
+ [this, chosen_decoders] { SetDecoders(chosen_decoders); });
+}
+
+void AndroidVoipClient::SetLocalAddress(const std::string& ip_address,
+ const int port_number) {
+ RTC_DCHECK_RUN_ON(voip_thread_.get());
+
+ rtp_local_address_ = rtc::SocketAddress(ip_address, port_number);
+ rtcp_local_address_ = rtc::SocketAddress(ip_address, port_number + 1);
+}
+
+void AndroidVoipClient::SetLocalAddress(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jstring>& j_ip_address_string,
+ jint j_port_number_int) {
+ const std::string& ip_address =
+ webrtc::JavaToNativeString(env, j_ip_address_string);
+ voip_thread_->PostTask([this, ip_address, j_port_number_int] {
+ SetLocalAddress(ip_address, j_port_number_int);
+ });
+}
+
+void AndroidVoipClient::SetRemoteAddress(const std::string& ip_address,
+ const int port_number) {
+ RTC_DCHECK_RUN_ON(voip_thread_.get());
+
+ rtp_remote_address_ = rtc::SocketAddress(ip_address, port_number);
+ rtcp_remote_address_ = rtc::SocketAddress(ip_address, port_number + 1);
+}
+
+void AndroidVoipClient::SetRemoteAddress(
+ JNIEnv* env,
+ const webrtc::JavaParamRef<jstring>& j_ip_address_string,
+ jint j_port_number_int) {
+ const std::string& ip_address =
+ webrtc::JavaToNativeString(env, j_ip_address_string);
+ voip_thread_->PostTask([this, ip_address, j_port_number_int] {
+ SetRemoteAddress(ip_address, j_port_number_int);
+ });
+}
+
// Creates the VoIP channel and the RTP/RTCP UDP sockets, wiring incoming
// packets into the engine. Reports success/failure back to Java via
// onStartSessionCompleted.
void AndroidVoipClient::StartSession(JNIEnv* env) {
  RUN_ON_VOIP_THREAD(StartSession, env);

  // CreateChannel guarantees to return valid channel id.
  channel_ = voip_engine_->Base().CreateChannel(this, absl::nullopt);

  rtp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(),
                                                rtp_local_address_));
  if (!rtp_socket_) {
    // NOTE(review): on socket failure the channel created above is left
    // allocated (channel_ stays set while the sockets are null) — confirm
    // whether it should be released before reporting failure.
    RTC_LOG_ERR(LS_ERROR) << "Socket creation failed";
    Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_,
                                            /*isSuccessful=*/false);
    return;
  }
  // Deliver received RTP datagrams to OnSignalReadRTPPacket.
  rtp_socket_->SignalReadPacket.connect(
      this, &AndroidVoipClient::OnSignalReadRTPPacket);

  rtcp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(),
                                                 rtcp_local_address_));
  if (!rtcp_socket_) {
    RTC_LOG_ERR(LS_ERROR) << "Socket creation failed";
    Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_,
                                            /*isSuccessful=*/false);
    return;
  }
  // Deliver received RTCP datagrams to OnSignalReadRTCPPacket.
  rtcp_socket_->SignalReadPacket.connect(
      this, &AndroidVoipClient::OnSignalReadRTCPPacket);
  Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_,
                                          /*isSuccessful=*/true);
}
+
+void AndroidVoipClient::StopSession(JNIEnv* env) {
+ RUN_ON_VOIP_THREAD(StopSession, env);
+
+ if (!channel_) {
+ RTC_LOG(LS_ERROR) << "Channel has not been created";
+ Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_,
+ /*isSuccessful=*/false);
+ return;
+ }
+ if (voip_engine_->Base().StopSend(*channel_) != webrtc::VoipResult::kOk ||
+ voip_engine_->Base().StopPlayout(*channel_) != webrtc::VoipResult::kOk) {
+ Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_,
+ /*isSuccessful=*/false);
+ return;
+ }
+
+ rtp_socket_->Close();
+ rtcp_socket_->Close();
+
+ webrtc::VoipResult result = voip_engine_->Base().ReleaseChannel(*channel_);
+ RTC_CHECK(result == webrtc::VoipResult::kOk);
+
+ channel_ = absl::nullopt;
+ Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_,
+ /*isSuccessful=*/true);
+}
+
// Starts RTP transmission on the active channel and reports the outcome to
// Java via onStartSendCompleted.
void AndroidVoipClient::StartSend(JNIEnv* env) {
  RUN_ON_VOIP_THREAD(StartSend, env);

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    Java_VoipClient_onStartSendCompleted(env_, j_voip_client_,
                                         /*isSuccessful=*/false);
    return;
  }
  bool sending_started =
      (voip_engine_->Base().StartSend(*channel_) == webrtc::VoipResult::kOk);
  Java_VoipClient_onStartSendCompleted(env_, j_voip_client_, sending_started);
}
+
// Stops RTP transmission on the active channel and reports the outcome to
// Java via onStopSendCompleted.
void AndroidVoipClient::StopSend(JNIEnv* env) {
  RUN_ON_VOIP_THREAD(StopSend, env);

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    Java_VoipClient_onStopSendCompleted(env_, j_voip_client_,
                                        /*isSuccessful=*/false);
    return;
  }
  bool sending_stopped =
      (voip_engine_->Base().StopSend(*channel_) == webrtc::VoipResult::kOk);
  Java_VoipClient_onStopSendCompleted(env_, j_voip_client_, sending_stopped);
}
+
// Starts audio playout on the active channel and reports the outcome to Java
// via onStartPlayoutCompleted.
void AndroidVoipClient::StartPlayout(JNIEnv* env) {
  RUN_ON_VOIP_THREAD(StartPlayout, env);

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    Java_VoipClient_onStartPlayoutCompleted(env_, j_voip_client_,
                                            /*isSuccessful=*/false);
    return;
  }
  bool playout_started =
      (voip_engine_->Base().StartPlayout(*channel_) == webrtc::VoipResult::kOk);
  Java_VoipClient_onStartPlayoutCompleted(env_, j_voip_client_,
                                          playout_started);
}
+
// Stops audio playout on the active channel and reports the outcome to Java
// via onStopPlayoutCompleted.
void AndroidVoipClient::StopPlayout(JNIEnv* env) {
  RUN_ON_VOIP_THREAD(StopPlayout, env);

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    Java_VoipClient_onStopPlayoutCompleted(env_, j_voip_client_,
                                           /*isSuccessful=*/false);
    return;
  }
  bool playout_stopped =
      (voip_engine_->Base().StopPlayout(*channel_) == webrtc::VoipResult::kOk);
  Java_VoipClient_onStopPlayoutCompleted(env_, j_voip_client_, playout_stopped);
}
+
// Destroys this instance. Valid only for objects allocated via Create()
// (which heap-allocates with `new`); called from Java when the client is
// done with the native object.
void AndroidVoipClient::Delete(JNIEnv* env) {
  delete this;
}
+
// Sends one RTP packet to the configured remote RTP address. Runs on the
// VoIP thread; send failures are logged, not propagated.
void AndroidVoipClient::SendRtpPacket(const std::vector<uint8_t>& packet_copy) {
  RTC_DCHECK_RUN_ON(voip_thread_.get());

  if (!rtp_socket_->SendTo(packet_copy.data(), packet_copy.size(),
                           rtp_remote_address_, rtc::PacketOptions())) {
    RTC_LOG(LS_ERROR) << "Failed to send RTP packet";
  }
}
+
+bool AndroidVoipClient::SendRtp(rtc::ArrayView<const uint8_t> packet,
+ const webrtc::PacketOptions& options) {
+ std::vector<uint8_t> packet_copy(packet.begin(), packet.end());
+ voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] {
+ SendRtpPacket(packet_copy);
+ });
+ return true;
+}
+
// Sends one RTCP packet to the configured remote RTCP address. Runs on the
// VoIP thread; send failures are logged, not propagated.
void AndroidVoipClient::SendRtcpPacket(
    const std::vector<uint8_t>& packet_copy) {
  RTC_DCHECK_RUN_ON(voip_thread_.get());

  if (!rtcp_socket_->SendTo(packet_copy.data(), packet_copy.size(),
                            rtcp_remote_address_, rtc::PacketOptions())) {
    RTC_LOG(LS_ERROR) << "Failed to send RTCP packet";
  }
}
+
+bool AndroidVoipClient::SendRtcp(rtc::ArrayView<const uint8_t> packet) {
+ std::vector<uint8_t> packet_copy(packet.begin(), packet.end());
+ voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] {
+ SendRtcpPacket(packet_copy);
+ });
+ return true;
+}
+
// Feeds one received RTP packet into the VoIP engine. Runs on the VoIP
// thread; dropped (with an error log) if no channel exists yet.
void AndroidVoipClient::ReadRTPPacket(const std::vector<uint8_t>& packet_copy) {
  RTC_DCHECK_RUN_ON(voip_thread_.get());

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    return;
  }
  webrtc::VoipResult result = voip_engine_->Network().ReceivedRTPPacket(
      *channel_,
      rtc::ArrayView<const uint8_t>(packet_copy.data(), packet_copy.size()));
  RTC_CHECK(result == webrtc::VoipResult::kOk);
}
+
+void AndroidVoipClient::OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket,
+ const char* rtp_packet,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const int64_t& timestamp) {
+ std::vector<uint8_t> packet_copy(rtp_packet, rtp_packet + size);
+ voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] {
+ ReadRTPPacket(packet_copy);
+ });
+}
+
// Feeds one received RTCP packet into the VoIP engine. Runs on the VoIP
// thread; dropped (with an error log) if no channel exists yet.
void AndroidVoipClient::ReadRTCPPacket(
    const std::vector<uint8_t>& packet_copy) {
  RTC_DCHECK_RUN_ON(voip_thread_.get());

  if (!channel_) {
    RTC_LOG(LS_ERROR) << "Channel has not been created";
    return;
  }
  webrtc::VoipResult result = voip_engine_->Network().ReceivedRTCPPacket(
      *channel_,
      rtc::ArrayView<const uint8_t>(packet_copy.data(), packet_copy.size()));
  RTC_CHECK(result == webrtc::VoipResult::kOk);
}
+
+void AndroidVoipClient::OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket,
+ const char* rtcp_packet,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const int64_t& timestamp) {
+ std::vector<uint8_t> packet_copy(rtcp_packet, rtcp_packet + size);
+ voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] {
+ ReadRTCPPacket(packet_copy);
+ });
+}
+
// JNI entry point backing VoipClient's native create method. Returns the
// native AndroidVoipClient as a jlong handle; the Java side owns the handle
// and releases it through Delete().
static jlong JNI_VoipClient_CreateClient(
    JNIEnv* env,
    const webrtc::JavaParamRef<jobject>& application_context,
    const webrtc::JavaParamRef<jobject>& j_voip_client) {
  return webrtc::NativeToJavaPointer(
      AndroidVoipClient::Create(env, application_context, j_voip_client));
}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.h b/third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.h
new file mode 100644
index 0000000000..e2f1c64590
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/jni/android_voip_client.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_
+#define EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_
+
+#include <jni.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/audio_codecs/audio_format.h"
+#include "api/call/transport.h"
+#include "api/voip/voip_base.h"
+#include "api/voip/voip_engine.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/async_udp_socket.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc_examples {
+
+// AndroidVoipClient facilitates the use of the VoIP API defined in
+// api/voip/voip_engine.h. One instance of AndroidVoipClient should
+// suffice for most VoIP applications. AndroidVoipClient implements
+// webrtc::Transport to send RTP/RTCP packets to the remote endpoint.
+// It also creates methods (slots) for sockets to connect to in
+// order to receive RTP/RTCP packets. AndroidVoipClient does all
+// operations with rtc::Thread (voip_thread_), this is to comply
+// with consistent thread usage requirement with ProcessThread used
+// within VoipEngine, as well as providing asynchronicity to the
+// caller. AndroidVoipClient is meant to be used by Java through JNI.
class AndroidVoipClient : public webrtc::Transport,
                          public sigslot::has_slots<> {
 public:
  // Returns a pointer to an AndroidVoipClient object. Clients should
  // use this factory method to create AndroidVoipClient objects. The
  // method will return a nullptr in case of initialization errors.
  // (NOTE(review): the current implementation always returns non-null —
  // confirm which behavior is intended.)
  // It is the client's responsibility to delete the pointer when
  // they are done with it (this class provides a Delete() method).
  static AndroidVoipClient* Create(
      JNIEnv* env,
      const webrtc::JavaParamRef<jobject>& application_context,
      const webrtc::JavaParamRef<jobject>& j_voip_client);

  ~AndroidVoipClient() override;

  // Provides client with a Java List of Strings containing names of
  // the built-in supported codecs through callback.
  void GetSupportedCodecs(JNIEnv* env);

  // Provides client with a Java String of the default local IPv4 address
  // through callback. If IPv4 address is not found, provide the default
  // local IPv6 address. If IPv6 address is not found, provide an empty
  // string.
  void GetLocalIPAddress(JNIEnv* env);

  // Sets the encoder used by the VoIP API.
  void SetEncoder(JNIEnv* env,
                  const webrtc::JavaParamRef<jstring>& j_encoder_string);

  // Sets the decoders used by the VoIP API.
  void SetDecoders(JNIEnv* env,
                   const webrtc::JavaParamRef<jobject>& j_decoder_strings);

  // Sets two local/remote addresses, one for RTP packets, and another for
  // RTCP packets. The RTP address will have IP address j_ip_address_string
  // and port number j_port_number_int, the RTCP address will have IP address
  // j_ip_address_string and port number j_port_number_int+1.
  void SetLocalAddress(JNIEnv* env,
                       const webrtc::JavaParamRef<jstring>& j_ip_address_string,
                       jint j_port_number_int);
  void SetRemoteAddress(
      JNIEnv* env,
      const webrtc::JavaParamRef<jstring>& j_ip_address_string,
      jint j_port_number_int);

  // Starts a VoIP session, then calls a callback method with a boolean
  // value indicating if the session has started successfully. The VoIP
  // operations below can only be used after a session has already started.
  void StartSession(JNIEnv* env);

  // Stops the current session, then calls a callback method with a
  // boolean value indicating if the session has stopped successfully.
  void StopSession(JNIEnv* env);

  // Starts sending RTP/RTCP packets to the remote endpoint, then calls
  // a callback method with a boolean value indicating if sending
  // has started successfully.
  void StartSend(JNIEnv* env);

  // Stops sending RTP/RTCP packets to the remote endpoint, then calls
  // a callback method with a boolean value indicating if sending
  // has stopped successfully.
  void StopSend(JNIEnv* env);

  // Starts playing out the voice data received from the remote endpoint,
  // then calls a callback method with a boolean value indicating if
  // playout has started successfully.
  void StartPlayout(JNIEnv* env);

  // Stops playing out the voice data received from the remote endpoint,
  // then calls a callback method with a boolean value indicating if
  // playout has stopped successfully.
  void StopPlayout(JNIEnv* env);

  // Deletes this object. Used by client when they are done.
  void Delete(JNIEnv* env);

  // Implementation for Transport.
  bool SendRtp(rtc::ArrayView<const uint8_t> packet,
               const webrtc::PacketOptions& options) override;
  bool SendRtcp(rtc::ArrayView<const uint8_t> packet) override;

  // Slots for sockets to connect to.
  void OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket,
                             const char* rtp_packet,
                             size_t size,
                             const rtc::SocketAddress& addr,
                             const int64_t& timestamp);
  void OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket,
                              const char* rtcp_packet,
                              size_t size,
                              const rtc::SocketAddress& addr,
                              const int64_t& timestamp);

 private:
  // Non-public: construction must go through Create() so Init() runs.
  AndroidVoipClient(JNIEnv* env,
                    const webrtc::JavaParamRef<jobject>& j_voip_client)
      : voip_thread_(rtc::Thread::CreateWithSocketServer()),
        j_voip_client_(env, j_voip_client) {}

  // Second-phase initialization invoked by Create(); attaches the VoIP
  // thread to the JVM and creates the VoipEngine.
  void Init(JNIEnv* env,
            const webrtc::JavaParamRef<jobject>& application_context);

  // Overloaded methods having native C++ variables as arguments.
  void SetEncoder(const std::string& encoder);
  void SetDecoders(const std::vector<std::string>& decoders);
  void SetLocalAddress(const std::string& ip_address, int port_number);
  void SetRemoteAddress(const std::string& ip_address, int port_number);

  // Methods to send and receive RTP/RTCP packets. Takes in a
  // copy of a packet as a vector to prolong the lifetime of
  // the packet as these methods will be called asynchronously.
  void SendRtpPacket(const std::vector<uint8_t>& packet_copy);
  void SendRtcpPacket(const std::vector<uint8_t>& packet_copy);
  void ReadRTPPacket(const std::vector<uint8_t>& packet_copy);
  void ReadRTCPPacket(const std::vector<uint8_t>& packet_copy);

  // Used to invoke operations and send/receive RTP/RTCP packets.
  // All members annotated RTC_GUARDED_BY(voip_thread_) below must only be
  // touched from this thread.
  std::unique_ptr<rtc::Thread> voip_thread_;
  // Reference to the VoipClient java instance used to
  // invoke callbacks when operations are finished.
  webrtc::ScopedJavaGlobalRef<jobject> j_voip_client_
      RTC_GUARDED_BY(voip_thread_);
  // A list of AudioCodecSpec supported by the built-in
  // encoder/decoder factories.
  std::vector<webrtc::AudioCodecSpec> supported_codecs_
      RTC_GUARDED_BY(voip_thread_);
  // A JNI context used by the voip_thread_.
  JNIEnv* env_ RTC_GUARDED_BY(voip_thread_);
  // The entry point to all VoIP APIs.
  std::unique_ptr<webrtc::VoipEngine> voip_engine_ RTC_GUARDED_BY(voip_thread_);
  // Used by the VoIP API to facilitate a VoIP session.
  absl::optional<webrtc::ChannelId> channel_ RTC_GUARDED_BY(voip_thread_);
  // Members below are used for network related operations.
  std::unique_ptr<rtc::AsyncUDPSocket> rtp_socket_ RTC_GUARDED_BY(voip_thread_);
  std::unique_ptr<rtc::AsyncUDPSocket> rtcp_socket_
      RTC_GUARDED_BY(voip_thread_);
  rtc::SocketAddress rtp_local_address_ RTC_GUARDED_BY(voip_thread_);
  rtc::SocketAddress rtcp_local_address_ RTC_GUARDED_BY(voip_thread_);
  rtc::SocketAddress rtp_remote_address_ RTC_GUARDED_BY(voip_thread_);
  rtc::SocketAddress rtcp_remote_address_ RTC_GUARDED_BY(voip_thread_);
};
+
+} // namespace webrtc_examples
+
+#endif // EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_
diff --git a/third_party/libwebrtc/examples/androidvoip/jni/onload.cc b/third_party/libwebrtc/examples/androidvoip/jni/onload.cc
new file mode 100644
index 0000000000..b952de348b
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/jni/onload.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/base/init.h"
+
+namespace webrtc_examples {
+
// Invoked by the Android runtime when this native library is loaded.
// Initializes the WebRTC Android environment and SSL before any other native
// call can run; returning JNI_VERSION_1_6 declares the JNI version we need.
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
  webrtc::InitAndroid(jvm);
  RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
  return JNI_VERSION_1_6;
}
+
// Intended unload hook that tears down SSL state.
// NOTE(review): the JNI specification names this hook `JNI_OnUnload`
// (lowercase 'l'); with this capitalization the VM will not invoke it —
// confirm whether the misspelling is intentional before relying on cleanup.
extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
  RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/androidvoip/res/layout/activity_main.xml b/third_party/libwebrtc/examples/androidvoip/res/layout/activity_main.xml
new file mode 100644
index 0000000000..c7fa5a9b31
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/res/layout/activity_main.xml
@@ -0,0 +1,303 @@
+<?xml version="1.0" encoding="utf-8"?>
+<ScrollView
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:app="http://schemas.android.com/apk/res-auto"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:id="@+id/scroll_view"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:focusable="true"
+ android:focusableInTouchMode="true"
+ tools:context="org.webrtc.examples.androidvoip.MainActivity">
+
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:padding="8dp">
+
+ <TextView
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="15dp"
+ android:text="@string/local_endpoint_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <!--Local IP Address-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical" >
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="12dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/ip_address_text_view"
+ android:textSize="16dp" />
+
+ <TextView
+ android:id="@+id/local_ip_address_text_view"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="12dp"
+ android:layout_marginRight="15dp"
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <!--Local Port Number-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/port_number_text_view"
+ android:textSize="16dp" />
+
+ <EditText
+ android:id="@+id/local_port_number_edit_text"
+ android:layout_width="0dp"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1"
+ android:text="10000"
+ android:inputType="number"
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <TextView
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="30dp"
+ android:text="@string/remote_endpoint_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <!--Remote IP Address-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/ip_address_text_view"
+ android:textSize="16dp" />
+
+ <EditText
+ android:id="@+id/remote_ip_address_edit_text"
+ android:layout_width="0dp"
+ android:layout_height="wrap_content"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1"
+ android:inputType="number"
+ android:digits="0123456789."
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <!--Remote Port Number-->
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/port_number_text_view"
+ android:textSize="16dp" />
+
+ <EditText
+ android:id="@+id/remote_port_number_edit_text"
+ android:layout_width="0dp"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1"
+ android:text="10000"
+ android:inputType="number"
+ android:textSize="16dp" />
+
+ </LinearLayout>
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="30dp"
+ android:text="@string/encoder_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <Spinner
+ android:id="@+id/encoder_spinner"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginTop="10dp"/>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_marginTop="20dp"
+ android:layout_gravity="center_vertical">
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="15dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="25dp"
+ android:text="@string/decoder_text_view"
+ android:textSize="19dp"
+ android:textStyle="bold"
+ android:textColor="@color/almost_black" />
+
+ <Button
+ android:id="@+id/decoder_selection_button"
+ android:text="@string/decoder_selection_button"
+ style="?android:attr/buttonBarButtonStyle"
+ android:layout_width="0dp"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:layout_weight="1" />
+
+ </LinearLayout>
+
+
+ <TextView
+ android:id="@+id/decoders_text_view"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="15dp"
+ android:layout_marginBottom="30dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:text="@string/decoders_text_view_default"
+ android:textSize="16dp" />
+
+
+ <RelativeLayout
+ android:id="@+id/switch_layout"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="15dp"
+ android:visibility="gone" >
+
+ <View
+ android:id="@+id/divider"
+ android:layout_width="match_parent"
+ android:layout_height="1dp"
+ android:layout_marginLeft="15dp"
+ android:layout_marginRight="15dp"
+ android:layout_marginBottom="45dp"
+ android:background="@color/light_gray" />
+
+ <LinearLayout
+ android:id="@+id/start_send_switch_layout"
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical"
+ android:layout_below="@id/divider" >
+
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginLeft="15dp"
+ android:gravity="left"
+ android:layout_weight="1"
+ android:text="@string/start_send_text_view"
+ android:textSize="16dp" />
+
+ <Switch
+ android:id="@+id/start_send_switch"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:gravity="right"
+ android:layout_weight="1" />
+
+ </LinearLayout>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_gravity="center_vertical"
+ android:layout_below="@id/start_send_switch_layout">
+
+ <TextView
+ android:id="@+id/start_playout_text_view"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginLeft="15dp"
+ android:gravity="left"
+ android:layout_weight="1"
+ android:text="@string/start_playout_text_view"
+ android:textSize="16dp" />
+
+ <Switch
+ android:id="@+id/start_playout_switch"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:layout_marginRight="15dp"
+ android:gravity="right"
+ android:layout_weight="1" />
+
+ </LinearLayout>
+
+ </RelativeLayout>
+
+ <LinearLayout
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:gravity="center"
+ android:orientation="vertical" >
+
+ <ToggleButton
+ android:id="@+id/session_button"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_margin="8dp"
+ android:textOff="@string/session_button_text_off"
+ android:textOn="@string/session_button_text_on"
+ style="?android:attr/buttonStyle" />
+
+ </LinearLayout>
+
+ </LinearLayout>
+
+</ScrollView>
diff --git a/third_party/libwebrtc/examples/androidvoip/res/values/colors.xml b/third_party/libwebrtc/examples/androidvoip/res/values/colors.xml
new file mode 100644
index 0000000000..4dadaa9941
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/res/values/colors.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <color name="almost_black">#484848</color>
+ <color name="light_gray">#D3D3D3</color>
+</resources> \ No newline at end of file
diff --git a/third_party/libwebrtc/examples/androidvoip/res/values/strings.xml b/third_party/libwebrtc/examples/androidvoip/res/values/strings.xml
new file mode 100644
index 0000000000..d519bfbbb6
--- /dev/null
+++ b/third_party/libwebrtc/examples/androidvoip/res/values/strings.xml
@@ -0,0 +1,19 @@
+<resources>
+ <string name="app_name">androidvoip</string>
+ <string name="local_endpoint_text_view">Local Endpoint</string>
+ <string name="remote_endpoint_text_view">Remote Endpoint</string>
+ <string name="ip_address_text_view">IP Address:</string>
+ <string name="port_number_text_view">Port Number:</string>
+ <string name="encoder_text_view">Select Encoder</string>
+ <string name="decoder_text_view">Select Decoder</string>
+ <string name="decoder_selection_button">Configure Selection</string>
+ <string name="decoders_text_view_default">No decoders selected</string>
+ <string name="dialog_title">Choose Decoders</string>
+ <string name="ok_label">Ok</string>
+ <string name="dismiss_label">Dismiss</string>
+ <string name="clear_all_label">Clear All</string>
+ <string name="start_send_text_view">Start Sending</string>
+ <string name="start_playout_text_view">Start Playout</string>
+ <string name="session_button_text_off">Start Session</string>
+ <string name="session_button_text_on">Stop Session</string>
+</resources>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h
new file mode 100644
index 0000000000..31e0e4dd7c
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppClient.h"
+
+#import "sdk/objc/api/peerconnection/RTCPeerConnection.h"
+
+#import "ARDRoomServerClient.h"
+#import "ARDSignalingChannel.h"
+#import "ARDTURNClient.h"
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
// Class extension exposing ARDAppClient internals to tests and to the
// implementation file; adopts the signaling-channel and peer-connection
// delegate protocols.
@interface ARDAppClient () <ARDSignalingChannelDelegate, RTC_OBJC_TYPE (RTCPeerConnectionDelegate)>

// All properties should only be mutated from the main queue.
@property(nonatomic, strong) id<ARDRoomServerClient> roomServerClient;
@property(nonatomic, strong) id<ARDSignalingChannel> channel;
@property(nonatomic, strong) id<ARDSignalingChannel> loopbackChannel;
@property(nonatomic, strong) id<ARDTURNClient> turnClient;

@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
@property(nonatomic, strong) NSMutableArray *messageQueue;

@property(nonatomic, assign) BOOL isTurnComplete;
@property(nonatomic, assign) BOOL hasReceivedSdp;
@property(nonatomic, readonly) BOOL hasJoinedRoomServerRoom;

@property(nonatomic, strong) NSString *roomId;
@property(nonatomic, strong) NSString *clientId;
@property(nonatomic, assign) BOOL isInitiator;
@property(nonatomic, strong) NSMutableArray *iceServers;
@property(nonatomic, strong) NSURL *webSocketURL;
@property(nonatomic, strong) NSURL *webSocketRestURL;
@property(nonatomic, readonly) BOOL isLoopback;

@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints;

// Designated initializer used by tests to inject mock collaborators.
- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
                        signalingChannel:(id<ARDSignalingChannel>)channel
                              turnClient:(id<ARDTURNClient>)turnClient
                                delegate:(id<ARDAppClientDelegate>)delegate;

@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h
new file mode 100644
index 0000000000..91d2cef1ce
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCPeerConnection.h"
+#import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
+
+typedef NS_ENUM(NSInteger, ARDAppClientState) {
+ // Disconnected from servers.
+ kARDAppClientStateDisconnected,
+ // Connecting to servers.
+ kARDAppClientStateConnecting,
+ // Connected to servers.
+ kARDAppClientStateConnected,
+};
+
+@class ARDAppClient;
+@class ARDSettingsModel;
+@class ARDExternalSampleCapturer;
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCCameraVideoCapturer);
+@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
+
+// The delegate is informed of pertinent events and will be called on the
+// main queue.
+@protocol ARDAppClientDelegate <NSObject>
+
+- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state;
+
+- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state;
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer;
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack;
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack;
+
+- (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
+
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
+
+@optional
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer;
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer;
+
+@end
+
+// Handles connections to the AppRTC server for a given room. Methods on this
+// class should only be called from the main queue.
+@interface ARDAppClient : NSObject
+
+// If `shouldGetStats` is true, stats will be reported in 1s intervals through
+// the delegate.
+@property(nonatomic, assign) BOOL shouldGetStats;
+@property(nonatomic, readonly) ARDAppClientState state;
+@property(nonatomic, weak) id<ARDAppClientDelegate> delegate;
+@property(nonatomic, assign, getter=isBroadcast) BOOL broadcast;
+
+// Convenience constructor since all expected use cases will need a delegate
+// in order to receive remote tracks.
+- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate;
+
+// Establishes a connection with the AppRTC servers for the given room id.
+// `settings` is an object containing settings such as video codec for the call.
+// If `isLoopback` is true, the call will connect to itself.
+- (void)connectToRoomWithId:(NSString *)roomId
+ settings:(ARDSettingsModel *)settings
+ isLoopback:(BOOL)isLoopback;
+
+// Disconnects from the AppRTC servers and any connected clients.
+- (void)disconnect;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m
new file mode 100644
index 0000000000..4420972598
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m
@@ -0,0 +1,899 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppClient+Internal.h"
+
+#import "sdk/objc/api/peerconnection/RTCAudioTrack.h"
+#import "sdk/objc/api/peerconnection/RTCConfiguration.h"
+#import "sdk/objc/api/peerconnection/RTCFileLogger.h"
+#import "sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h"
+#import "sdk/objc/api/peerconnection/RTCIceServer.h"
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/api/peerconnection/RTCMediaStream.h"
+#import "sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h"
+#import "sdk/objc/api/peerconnection/RTCRtpSender.h"
+#import "sdk/objc/api/peerconnection/RTCRtpTransceiver.h"
+#import "sdk/objc/api/peerconnection/RTCTracing.h"
+#import "sdk/objc/api/peerconnection/RTCVideoSource.h"
+#import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
+#import "sdk/objc/base/RTCLogging.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/components/capturer/RTCFileVideoCapturer.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
+
+#import "ARDAppEngineClient.h"
+#import "ARDExternalSampleCapturer.h"
+#import "ARDJoinResponse.h"
+#import "ARDMessageResponse.h"
+#import "ARDSettingsModel.h"
+#import "ARDSignalingMessage.h"
+#import "ARDTURNClient+Internal.h"
+#import "ARDUtilities.h"
+#import "ARDWebSocketChannel.h"
+#import "RTCIceCandidate+JSON.h"
+#import "RTCSessionDescription+JSON.h"
+
+static NSString * const kARDIceServerRequestUrl = @"https://appr.tc/params";
+
+static NSString * const kARDAppClientErrorDomain = @"ARDAppClient";
+static NSInteger const kARDAppClientErrorUnknown = -1;
+static NSInteger const kARDAppClientErrorRoomFull = -2;
+static NSInteger const kARDAppClientErrorCreateSDP = -3;
+static NSInteger const kARDAppClientErrorSetSDP = -4;
+static NSInteger const kARDAppClientErrorInvalidClient = -5;
+static NSInteger const kARDAppClientErrorInvalidRoom = -6;
+static NSString * const kARDMediaStreamId = @"ARDAMS";
+static NSString * const kARDAudioTrackId = @"ARDAMSa0";
+static NSString * const kARDVideoTrackId = @"ARDAMSv0";
+static NSString * const kARDVideoTrackKind = @"video";
+
+// TODO(tkchin): Add these as UI options.
+#if defined(WEBRTC_IOS)
+static BOOL const kARDAppClientEnableTracing = NO;
+static BOOL const kARDAppClientEnableRtcEventLog = YES;
+static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB.
+static int64_t const kARDAppClientRtcEventLogMaxSizeInBytes = 5e6; // 5 MB.
+#endif
+static int const kKbpsMultiplier = 1000;
+
+// We need a proxy to NSTimer because it causes a strong retain cycle. When
+// using the proxy, `invalidate` must be called before it properly deallocs.
+@interface ARDTimerProxy : NSObject
+
+- (instancetype)initWithInterval:(NSTimeInterval)interval
+ repeats:(BOOL)repeats
+ timerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
+
+@implementation ARDTimerProxy {
+ NSTimer *_timer;
+ void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithInterval:(NSTimeInterval)interval
+ repeats:(BOOL)repeats
+ timerHandler:(void (^)(void))timerHandler {
+ NSParameterAssert(timerHandler);
+ if (self = [super init]) {
+ _timerHandler = timerHandler;
+ _timer = [NSTimer scheduledTimerWithTimeInterval:interval
+ target:self
+ selector:@selector(timerDidFire:)
+ userInfo:nil
+ repeats:repeats];
+ }
+ return self;
+}
+
+- (void)invalidate {
+ [_timer invalidate];
+}
+
+- (void)timerDidFire:(NSTimer *)timer {
+ _timerHandler();
+}
+
+@end
+
+@implementation ARDAppClient {
+ RTC_OBJC_TYPE(RTCFileLogger) * _fileLogger;
+ ARDTimerProxy *_statsTimer;
+ ARDSettingsModel *_settings;
+ RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
+}
+
+@synthesize shouldGetStats = _shouldGetStats;
+@synthesize state = _state;
+@synthesize delegate = _delegate;
+@synthesize roomServerClient = _roomServerClient;
+@synthesize channel = _channel;
+@synthesize loopbackChannel = _loopbackChannel;
+@synthesize turnClient = _turnClient;
+@synthesize peerConnection = _peerConnection;
+@synthesize factory = _factory;
+@synthesize messageQueue = _messageQueue;
+@synthesize isTurnComplete = _isTurnComplete;
+@synthesize hasReceivedSdp = _hasReceivedSdp;
+@synthesize roomId = _roomId;
+@synthesize clientId = _clientId;
+@synthesize isInitiator = _isInitiator;
+@synthesize iceServers = _iceServers;
+@synthesize webSocketURL = _websocketURL;
+@synthesize webSocketRestURL = _websocketRestURL;
+@synthesize defaultPeerConnectionConstraints =
+ _defaultPeerConnectionConstraints;
+@synthesize isLoopback = _isLoopback;
+@synthesize broadcast = _broadcast;
+
+- (instancetype)init {
+ return [self initWithDelegate:nil];
+}
+
+- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate {
+ if (self = [super init]) {
+ _roomServerClient = [[ARDAppEngineClient alloc] init];
+ _delegate = delegate;
+ NSURL *turnRequestURL = [NSURL URLWithString:kARDIceServerRequestUrl];
+ _turnClient = [[ARDTURNClient alloc] initWithURL:turnRequestURL];
+ [self configure];
+ }
+ return self;
+}
+
+// TODO(tkchin): Provide signaling channel factory interface so we can recreate
+// channel if we need to on network failure. Also, make this the default public
+// constructor.
+- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
+ signalingChannel:(id<ARDSignalingChannel>)channel
+ turnClient:(id<ARDTURNClient>)turnClient
+ delegate:(id<ARDAppClientDelegate>)delegate {
+ NSParameterAssert(rsClient);
+ NSParameterAssert(channel);
+ NSParameterAssert(turnClient);
+ if (self = [super init]) {
+ _roomServerClient = rsClient;
+ _channel = channel;
+ _turnClient = turnClient;
+ _delegate = delegate;
+ [self configure];
+ }
+ return self;
+}
+
+- (void)configure {
+ _messageQueue = [NSMutableArray array];
+ _iceServers = [NSMutableArray array];
+ _fileLogger = [[RTC_OBJC_TYPE(RTCFileLogger) alloc] init];
+ [_fileLogger start];
+}
+
+- (void)dealloc {
+ self.shouldGetStats = NO;
+ [self disconnect];
+}
+
+- (void)setShouldGetStats:(BOOL)shouldGetStats {
+ if (_shouldGetStats == shouldGetStats) {
+ return;
+ }
+ if (shouldGetStats) {
+ __weak ARDAppClient *weakSelf = self;
+ _statsTimer = [[ARDTimerProxy alloc] initWithInterval:1
+ repeats:YES
+ timerHandler:^{
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf.peerConnection statisticsWithCompletionHandler:^(
+ RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf.delegate appClient:strongSelf didGetStats:stats];
+ });
+ }];
+ }];
+ } else {
+ [_statsTimer invalidate];
+ _statsTimer = nil;
+ }
+ _shouldGetStats = shouldGetStats;
+}
+
+- (void)setState:(ARDAppClientState)state {
+ if (_state == state) {
+ return;
+ }
+ _state = state;
+ [_delegate appClient:self didChangeState:_state];
+}
+
+- (void)connectToRoomWithId:(NSString *)roomId
+ settings:(ARDSettingsModel *)settings
+ isLoopback:(BOOL)isLoopback {
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(_state == kARDAppClientStateDisconnected);
+ _settings = settings;
+ _isLoopback = isLoopback;
+ self.state = kARDAppClientStateConnecting;
+
+ RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *decoderFactory =
+ [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
+ RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
+ [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
+ encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
+ _factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory
+ decoderFactory:decoderFactory];
+
+#if defined(WEBRTC_IOS)
+ if (kARDAppClientEnableTracing) {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
+ RTCStartInternalCapture(filePath);
+ }
+#endif
+
+ // Request TURN.
+ __weak ARDAppClient *weakSelf = self;
+ [_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
+ NSError *error) {
+ if (error) {
+ RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription);
+ }
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf.iceServers addObjectsFromArray:turnServers];
+ strongSelf.isTurnComplete = YES;
+ [strongSelf startSignalingIfReady];
+ }];
+
+ // Join room on room server.
+ [_roomServerClient joinRoomWithRoomId:roomId
+ isLoopback:isLoopback
+ completionHandler:^(ARDJoinResponse *response, NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ if (error) {
+ [strongSelf.delegate appClient:strongSelf didError:error];
+ return;
+ }
+ NSError *joinError =
+ [[strongSelf class] errorForJoinResultType:response.result];
+ if (joinError) {
+ RTCLogError(@"Failed to join room:%@ on room server.", roomId);
+ [strongSelf disconnect];
+ [strongSelf.delegate appClient:strongSelf didError:joinError];
+ return;
+ }
+ RTCLog(@"Joined room:%@ on room server.", roomId);
+ strongSelf.roomId = response.roomId;
+ strongSelf.clientId = response.clientId;
+ strongSelf.isInitiator = response.isInitiator;
+ for (ARDSignalingMessage *message in response.messages) {
+ if (message.type == kARDSignalingMessageTypeOffer ||
+ message.type == kARDSignalingMessageTypeAnswer) {
+ strongSelf.hasReceivedSdp = YES;
+ [strongSelf.messageQueue insertObject:message atIndex:0];
+ } else {
+ [strongSelf.messageQueue addObject:message];
+ }
+ }
+ strongSelf.webSocketURL = response.webSocketURL;
+ strongSelf.webSocketRestURL = response.webSocketRestURL;
+ [strongSelf registerWithColliderIfReady];
+ [strongSelf startSignalingIfReady];
+ }];
+}
+
+- (void)disconnect {
+ if (_state == kARDAppClientStateDisconnected) {
+ return;
+ }
+ if (self.hasJoinedRoomServerRoom) {
+ [_roomServerClient leaveRoomWithRoomId:_roomId
+ clientId:_clientId
+ completionHandler:nil];
+ }
+ if (_channel) {
+ if (_channel.state == kARDSignalingChannelStateRegistered) {
+ // Tell the other client we're hanging up.
+ ARDByeMessage *byeMessage = [[ARDByeMessage alloc] init];
+ [_channel sendMessage:byeMessage];
+ }
+ // Disconnect from collider.
+ _channel = nil;
+ }
+ _clientId = nil;
+ _roomId = nil;
+ _isInitiator = NO;
+ _hasReceivedSdp = NO;
+ _messageQueue = [NSMutableArray array];
+ _localVideoTrack = nil;
+#if defined(WEBRTC_IOS)
+ [_factory stopAecDump];
+ [_peerConnection stopRtcEventLog];
+#endif
+ [_peerConnection close];
+ _peerConnection = nil;
+ self.state = kARDAppClientStateDisconnected;
+#if defined(WEBRTC_IOS)
+ if (kARDAppClientEnableTracing) {
+ RTCStopInternalCapture();
+ }
+#endif
+}
+
+#pragma mark - ARDSignalingChannelDelegate
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didReceiveMessage:(ARDSignalingMessage *)message {
+ switch (message.type) {
+ case kARDSignalingMessageTypeOffer:
+ case kARDSignalingMessageTypeAnswer:
+ // Offers and answers must be processed before any other message, so we
+ // place them at the front of the queue.
+ _hasReceivedSdp = YES;
+ [_messageQueue insertObject:message atIndex:0];
+ break;
+ case kARDSignalingMessageTypeCandidate:
+ case kARDSignalingMessageTypeCandidateRemoval:
+ [_messageQueue addObject:message];
+ break;
+ case kARDSignalingMessageTypeBye:
+ // Disconnects can be processed immediately.
+ [self processSignalingMessage:message];
+ return;
+ }
+ [self drainMessageQueueIfReady];
+}
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didChangeState:(ARDSignalingChannelState)state {
+ switch (state) {
+ case kARDSignalingChannelStateOpen:
+ break;
+ case kARDSignalingChannelStateRegistered:
+ break;
+ case kARDSignalingChannelStateClosed:
+ case kARDSignalingChannelStateError:
+ // TODO(tkchin): reconnection scenarios. Right now we just disconnect
+ // completely if the websocket connection fails.
+ [self disconnect];
+ break;
+ }
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCPeerConnectionDelegate)
+// Callbacks for this delegate occur on non-main thread and need to be
+// dispatched back to main queue as needed.
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeSignalingState:(RTCSignalingState)stateChanged {
+ RTCLog(@"Signaling state changed: %ld", (long)stateChanged);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ RTCLog(@"Stream with %lu video tracks and %lu audio tracks was added.",
+ (unsigned long)stream.videoTracks.count,
+ (unsigned long)stream.audioTracks.count);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
+ RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
+ RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ RTCLog(@"Stream was removed.");
+}
+
+- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
+ RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceConnectionState:(RTCIceConnectionState)newState {
+ RTCLog(@"ICE state changed: %ld", (long)newState);
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate appClient:self didChangeConnectionState:newState];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeConnectionState:(RTCPeerConnectionState)newState {
+ RTCLog(@"ICE+DTLS state changed: %ld", (long)newState);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceGatheringState:(RTCIceGatheringState)newState {
+ RTCLog(@"ICE gathering state changed: %ld", (long)newState);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDICECandidateMessage *message =
+ [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
+ [self sendSignalingMessage:message];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
+ RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, errorCode: %d, "
+ @"errorText: %@",
+ event.address,
+ event.port,
+ event.url,
+ event.errorCode,
+ event.errorText);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDICECandidateRemovalMessage *message =
+ [[ARDICECandidateRemovalMessage alloc]
+ initWithRemovedCandidates:candidates];
+ [self sendSignalingMessage:message];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
+ didChangeRemoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
+ lastReceivedMs:(int)lastDataReceivedMs
+ didHaveReason:(NSString *)reason {
+ RTCLog(@"ICE candidate pair changed because: %@", reason);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel {
+}
+
+#pragma mark - RTCSessionDescriptionDelegate
+// Callbacks for this delegate occur on non-main thread and need to be
+// dispatched back to main queue as needed.
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didCreateSessionDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ error:(NSError *)error {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (error) {
+ RTCLogError(@"Failed to create session description. Error: %@", error);
+ [self disconnect];
+ NSDictionary *userInfo = @{
+ NSLocalizedDescriptionKey: @"Failed to create session description.",
+ };
+ NSError *sdpError =
+ [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorCreateSDP
+ userInfo:userInfo];
+ [self.delegate appClient:self didError:sdpError];
+ return;
+ }
+ __weak ARDAppClient *weakSelf = self;
+ [self.peerConnection setLocalDescription:sdp
+ completionHandler:^(NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didSetSessionDescriptionWithError:error];
+ }];
+ ARDSessionDescriptionMessage *message =
+ [[ARDSessionDescriptionMessage alloc] initWithDescription:sdp];
+ [self sendSignalingMessage:message];
+ [self setMaxBitrateForPeerConnectionVideoSender];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didSetSessionDescriptionWithError:(NSError *)error {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (error) {
+ RTCLogError(@"Failed to set session description. Error: %@", error);
+ [self disconnect];
+ NSDictionary *userInfo = @{
+ NSLocalizedDescriptionKey: @"Failed to set session description.",
+ };
+ NSError *sdpError =
+ [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorSetSDP
+ userInfo:userInfo];
+ [self.delegate appClient:self didError:sdpError];
+ return;
+ }
+ // If we're answering and we've just set the remote offer we need to create
+ // an answer and set the local description.
+ if (!self.isInitiator && !self.peerConnection.localDescription) {
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints];
+ __weak ARDAppClient *weakSelf = self;
+ [self.peerConnection
+ answerForConstraints:constraints
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didCreateSessionDescription:sdp
+ error:error];
+ }];
+ }
+ });
+}
+
+#pragma mark - Private
+
+#if defined(WEBRTC_IOS)
+
+- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
+ NSParameterAssert(fileName.length);
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(
+ NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = paths.firstObject;
+ NSString *filePath =
+ [documentsDirPath stringByAppendingPathComponent:fileName];
+ return filePath;
+}
+
+#endif
+
+- (BOOL)hasJoinedRoomServerRoom {
+ return _clientId.length;
+}
+
+// Begins the peer connection connection process if we have both joined a room
+// on the room server and tried to obtain a TURN server. Otherwise does nothing.
+// A peer connection object will be created with a stream that contains local
+// audio and video capture. If this client is the caller, an offer is created as
+// well, otherwise the client will wait for an offer to arrive.
+- (void)startSignalingIfReady {
+ if (!_isTurnComplete || !self.hasJoinedRoomServerRoom) {
+ return;
+ }
+ self.state = kARDAppClientStateConnected;
+
+ // Create peer connection.
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints];
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate)
+ generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
+ config.iceServers = _iceServers;
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ config.certificate = pcert;
+
+ _peerConnection = [_factory peerConnectionWithConfiguration:config
+ constraints:constraints
+ delegate:self];
+ // Create AV senders.
+ [self createMediaSenders];
+ if (_isInitiator) {
+ // Send offer.
+ __weak ARDAppClient *weakSelf = self;
+ [_peerConnection
+ offerForConstraints:[self defaultOfferConstraints]
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didCreateSessionDescription:sdp
+ error:error];
+ }];
+ } else {
+ // Check if we've received an offer.
+ [self drainMessageQueueIfReady];
+ }
+#if defined(WEBRTC_IOS)
+ // Start event log.
+ if (kARDAppClientEnableRtcEventLog) {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-rtceventlog"];
+ if (![_peerConnection startRtcEventLogWithFilePath:filePath
+ maxSizeInBytes:kARDAppClientRtcEventLogMaxSizeInBytes]) {
+ RTCLogError(@"Failed to start event logging.");
+ }
+ }
+
+ // Start aecdump diagnostic recording.
+ if ([_settings currentCreateAecDumpSettingFromStore]) {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
+ if (![_factory startAecDumpWithFilePath:filePath
+ maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
+ RTCLogError(@"Failed to start aec dump.");
+ }
+ }
+#endif
+}
+
+// Processes the messages that we've received from the room server and the
+// signaling channel. The offer or answer message must be processed before other
+// signaling messages, however they can arrive out of order. Hence, this method
+// only processes pending messages if there is a peer connection object and
+// if we have received either an offer or answer.
+- (void)drainMessageQueueIfReady {
+ if (!_peerConnection || !_hasReceivedSdp) {
+ return;
+ }
+ for (ARDSignalingMessage *message in _messageQueue) {
+ [self processSignalingMessage:message];
+ }
+ [_messageQueue removeAllObjects];
+}
+
+// Processes the given signaling message based on its type.
+- (void)processSignalingMessage:(ARDSignalingMessage *)message {
+ NSParameterAssert(_peerConnection ||
+ message.type == kARDSignalingMessageTypeBye);
+ switch (message.type) {
+ case kARDSignalingMessageTypeOffer:
+ case kARDSignalingMessageTypeAnswer: {
+ ARDSessionDescriptionMessage *sdpMessage =
+ (ARDSessionDescriptionMessage *)message;
+ RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
+ __weak ARDAppClient *weakSelf = self;
+ [_peerConnection setRemoteDescription:description
+ completionHandler:^(NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didSetSessionDescriptionWithError:error];
+ }];
+ break;
+ }
+ case kARDSignalingMessageTypeCandidate: {
+ ARDICECandidateMessage *candidateMessage =
+ (ARDICECandidateMessage *)message;
+ __weak ARDAppClient *weakSelf = self;
+ [_peerConnection addIceCandidate:candidateMessage.candidate
+ completionHandler:^(NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ if (error) {
+ [strongSelf.delegate appClient:strongSelf didError:error];
+ }
+ }];
+ break;
+ }
+ case kARDSignalingMessageTypeCandidateRemoval: {
+ ARDICECandidateRemovalMessage *candidateMessage =
+ (ARDICECandidateRemovalMessage *)message;
+ [_peerConnection removeIceCandidates:candidateMessage.candidates];
+ break;
+ }
+ case kARDSignalingMessageTypeBye:
+ // Other client disconnected.
+ // TODO(tkchin): support waiting in room for next client. For now just
+ // disconnect.
+ [self disconnect];
+ break;
+ }
+}
+
+// Sends a signaling message to the other client. The caller will send messages
+// through the room server, whereas the callee will send messages over the
+// signaling channel.
+- (void)sendSignalingMessage:(ARDSignalingMessage *)message {
+ if (_isInitiator) {
+ __weak ARDAppClient *weakSelf = self;
+ [_roomServerClient sendMessage:message
+ forRoomId:_roomId
+ clientId:_clientId
+ completionHandler:^(ARDMessageResponse *response,
+ NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ if (error) {
+ [strongSelf.delegate appClient:strongSelf didError:error];
+ return;
+ }
+ NSError *messageError =
+ [[strongSelf class] errorForMessageResultType:response.result];
+ if (messageError) {
+ [strongSelf.delegate appClient:strongSelf didError:messageError];
+ return;
+ }
+ }];
+ } else {
+ [_channel sendMessage:message];
+ }
+}
+
+- (void)setMaxBitrateForPeerConnectionVideoSender {
+ for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
+ if (sender.track != nil) {
+ if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
+ [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender];
+ }
+ }
+ }
+}
+
+- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
+ if (maxBitrate.intValue <= 0) {
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
+ for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) {
+ encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
+ }
+ [sender setParameters:parametersToModify];
+}
+
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
+ for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) {
+ if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
+ return transceiver;
+ }
+ }
+ return nil;
+}
+
+- (void)createMediaSenders {
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints];
+ RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints];
+ RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source
+ trackId:kARDAudioTrackId];
+ [_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
+ _localVideoTrack = [self createLocalVideoTrack];
+ if (_localVideoTrack) {
+ [_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]];
+ [_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
+ // We can set up rendering for the remote track right away since the transceiver already has an
+ // RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and
+ // produce frames once RTP is received.
+ RTC_OBJC_TYPE(RTCVideoTrack) *track =
+ (RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track);
+ [_delegate appClient:self didReceiveRemoteVideoTrack:track];
+ }
+}
+
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)createLocalVideoTrack {
+ if ([_settings currentAudioOnlySettingFromStore]) {
+ return nil;
+ }
+
+ RTC_OBJC_TYPE(RTCVideoSource) *source = [_factory videoSource];
+
+#if !TARGET_IPHONE_SIMULATOR
+ if (self.isBroadcast) {
+ ARDExternalSampleCapturer *capturer =
+ [[ARDExternalSampleCapturer alloc] initWithDelegate:source];
+ [_delegate appClient:self didCreateLocalExternalSampleCapturer:capturer];
+ } else {
+ RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:source];
+ [_delegate appClient:self didCreateLocalCapturer:capturer];
+ }
+#else
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 10, *)) {
+ RTC_OBJC_TYPE(RTCFileVideoCapturer) *fileCapturer =
+ [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:source];
+ [_delegate appClient:self didCreateLocalFileCapturer:fileCapturer];
+ }
+#endif
+#endif
+
+ return [_factory videoTrackWithSource:source trackId:kARDVideoTrackId];
+}
+
+#pragma mark - Collider methods
+
+- (void)registerWithColliderIfReady {
+ if (!self.hasJoinedRoomServerRoom) {
+ return;
+ }
+ // Open WebSocket connection.
+ if (!_channel) {
+ _channel =
+ [[ARDWebSocketChannel alloc] initWithURL:_websocketURL
+ restURL:_websocketRestURL
+ delegate:self];
+ if (_isLoopback) {
+ _loopbackChannel =
+ [[ARDLoopbackWebSocketChannel alloc] initWithURL:_websocketURL
+ restURL:_websocketRestURL];
+ }
+ }
+ [_channel registerForRoomId:_roomId clientId:_clientId];
+ if (_isLoopback) {
+ [_loopbackChannel registerForRoomId:_roomId clientId:@"LOOPBACK_CLIENT_ID"];
+ }
+}
+
+#pragma mark - Defaults
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
+ NSDictionary *mandatoryConstraints = @{};
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
+ optionalConstraints:nil];
+ return constraints;
+}
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultAnswerConstraints {
+ return [self defaultOfferConstraints];
+}
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
+ NSDictionary *mandatoryConstraints = @{
+ @"OfferToReceiveAudio" : @"true",
+ @"OfferToReceiveVideo" : @"true"
+ };
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
+ optionalConstraints:nil];
+ return constraints;
+}
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultPeerConnectionConstraints {
+ if (_defaultPeerConnectionConstraints) {
+ return _defaultPeerConnectionConstraints;
+ }
+ NSString *value = _isLoopback ? @"false" : @"true";
+ NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value };
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+ optionalConstraints:optionalConstraints];
+ return constraints;
+}
+
+#pragma mark - Errors
+
++ (NSError *)errorForJoinResultType:(ARDJoinResultType)resultType {
+ NSError *error = nil;
+ switch (resultType) {
+ case kARDJoinResultTypeSuccess:
+ break;
+ case kARDJoinResultTypeUnknown: {
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Unknown error.",
+ }];
+ break;
+ }
+ case kARDJoinResultTypeFull: {
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorRoomFull
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Room is full.",
+ }];
+ break;
+ }
+ }
+ return error;
+}
+
++ (NSError *)errorForMessageResultType:(ARDMessageResultType)resultType {
+ NSError *error = nil;
+ switch (resultType) {
+ case kARDMessageResultTypeSuccess:
+ break;
+ case kARDMessageResultTypeUnknown:
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Unknown error.",
+ }];
+ break;
+ case kARDMessageResultTypeInvalidClient:
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorInvalidClient
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Invalid client.",
+ }];
+ break;
+ case kARDMessageResultTypeInvalidRoom:
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorInvalidRoom
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Invalid room.",
+ }];
+ break;
+ }
+ return error;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h
new file mode 100644
index 0000000000..7514f3645c
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDRoomServerClient.h"
+
+@interface ARDAppEngineClient : NSObject <ARDRoomServerClient>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m
new file mode 100644
index 0000000000..5139de60d6
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppEngineClient.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDJoinResponse.h"
+#import "ARDMessageResponse.h"
+#import "ARDSignalingMessage.h"
+#import "ARDUtilities.h"
+
+// TODO(tkchin): move these to a configuration object.
+static NSString * const kARDRoomServerHostUrl =
+ @"https://appr.tc";
+static NSString * const kARDRoomServerJoinFormat =
+ @"https://appr.tc/join/%@";
+static NSString * const kARDRoomServerJoinFormatLoopback =
+ @"https://appr.tc/join/%@?debug=loopback";
+static NSString * const kARDRoomServerMessageFormat =
+ @"https://appr.tc/message/%@/%@";
+static NSString * const kARDRoomServerLeaveFormat =
+ @"https://appr.tc/leave/%@/%@";
+
+static NSString * const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient";
+static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
+
+@implementation ARDAppEngineClient
+
+#pragma mark - ARDRoomServerClient
+
+- (void)joinRoomWithRoomId:(NSString *)roomId
+ isLoopback:(BOOL)isLoopback
+ completionHandler:(void (^)(ARDJoinResponse *response,
+ NSError *error))completionHandler {
+ NSParameterAssert(roomId.length);
+
+ NSString *urlString = nil;
+ if (isLoopback) {
+ urlString =
+ [NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId];
+ } else {
+ urlString =
+ [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
+ }
+
+ NSURL *roomURL = [NSURL URLWithString:urlString];
+ RTCLog(@"Joining room:%@ on room server.", roomId);
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:roomURL];
+ request.HTTPMethod = @"POST";
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
+ if (error) {
+ if (completionHandler) {
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ ARDJoinResponse *joinResponse = [ARDJoinResponse responseFromJSONData:data];
+ if (!joinResponse) {
+ if (completionHandler) {
+ NSError *error = [[self class] badResponseError];
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ if (completionHandler) {
+ completionHandler(joinResponse, nil);
+ }
+ }];
+}
+
+- (void)sendMessage:(ARDSignalingMessage *)message
+ forRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(ARDMessageResponse *response,
+ NSError *error))completionHandler {
+ NSParameterAssert(message);
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(clientId.length);
+
+ NSData *data = [message JSONData];
+ NSString *urlString =
+ [NSString stringWithFormat:
+ kARDRoomServerMessageFormat, roomId, clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ RTCLog(@"C->RS POST: %@", message);
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"POST";
+ request.HTTPBody = data;
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response,
+ NSData *data,
+ NSError *error) {
+ if (error) {
+ if (completionHandler) {
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ ARDMessageResponse *messageResponse =
+ [ARDMessageResponse responseFromJSONData:data];
+ if (!messageResponse) {
+ if (completionHandler) {
+ NSError *error = [[self class] badResponseError];
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ if (completionHandler) {
+ completionHandler(messageResponse, nil);
+ }
+ }];
+}
+
+- (void)leaveRoomWithRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(NSError *error))completionHandler {
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(clientId.length);
+
+ NSString *urlString =
+ [NSString stringWithFormat:kARDRoomServerLeaveFormat, roomId, clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"POST";
+
+ RTCLog(@"C->RS: BYE");
+ __block NSError *error = nil;
+
+ // We want a synchronous request so that we know that we've left the room on
+ // room server before we do any further work.
+ dispatch_semaphore_t sem = dispatch_semaphore_create(0);
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response, NSData *data, NSError *e) {
+ if (e) {
+ error = e;
+ }
+ dispatch_semaphore_signal(sem);
+ }];
+
+ dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
+ if (error) {
+ RTCLogError(@"Error leaving room %@ on room server: %@", roomId, error.localizedDescription);
+ if (completionHandler) {
+ completionHandler(error);
+ }
+ return;
+ }
+ RTCLog(@"Left room:%@ on room server.", roomId);
+ if (completionHandler) {
+ completionHandler(nil);
+ }
+}
+
+#pragma mark - Private
+
++ (NSError *)badResponseError {
+ NSError *error =
+ [[NSError alloc] initWithDomain:kARDAppEngineClientErrorDomain
+ code:kARDAppEngineClientErrorBadResponse
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Error parsing response.",
+ }];
+ return error;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h
new file mode 100644
index 0000000000..4febccee96
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+
+@class ARDSettingsModel;
+
+// Controls the camera. Handles starting the capture, switching cameras etc.
+@interface ARDCaptureController : NSObject
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
+ settings:(ARDSettingsModel *)settings;
+- (void)startCapture;
+- (void)startCapture:(void (^)(NSError *))completion;
+- (void)stopCapture;
+- (void)switchCamera;
+- (void)switchCamera:(void (^)(NSError *))completion;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m
new file mode 100644
index 0000000000..26cce9fdaa
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDCaptureController.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDSettingsModel.h"
+
+const Float64 kFramerateLimit = 30.0;
+
+@implementation ARDCaptureController {
+ RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer;
+ ARDSettingsModel *_settings;
+ BOOL _usingFrontCamera;
+}
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
+ settings:(ARDSettingsModel *)settings {
+ if (self = [super init]) {
+ _capturer = capturer;
+ _settings = settings;
+ _usingFrontCamera = YES;
+ }
+
+ return self;
+}
+
+- (void)startCapture {
+ [self startCapture:nil];
+}
+
+- (void)startCapture:(void (^)(NSError *))completion {
+ AVCaptureDevicePosition position =
+ _usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+ AVCaptureDevice *device = [self findDeviceForPosition:position];
+ AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
+
+ if (format == nil) {
+ RTCLogError(@"No valid formats for device %@", device);
+ NSAssert(NO, @"");
+
+ return;
+ }
+
+ NSInteger fps = [self selectFpsForFormat:format];
+
+ [_capturer startCaptureWithDevice:device format:format fps:fps completionHandler:completion];
+}
+
+- (void)stopCapture {
+ [_capturer stopCapture];
+}
+
+- (void)switchCamera {
+ _usingFrontCamera = !_usingFrontCamera;
+ [self startCapture:nil];
+}
+
+- (void)switchCamera:(void (^)(NSError *))completion {
+ _usingFrontCamera = !_usingFrontCamera;
+ [self startCapture:completion];
+}
+
+#pragma mark - Private
+
+- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
+ NSArray<AVCaptureDevice *> *captureDevices =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
+ for (AVCaptureDevice *device in captureDevices) {
+ if (device.position == position) {
+ return device;
+ }
+ }
+ return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
+ NSArray<AVCaptureDeviceFormat *> *formats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
+ int targetWidth = [_settings currentVideoResolutionWidthFromStore];
+ int targetHeight = [_settings currentVideoResolutionHeightFromStore];
+ AVCaptureDeviceFormat *selectedFormat = nil;
+ int currentDiff = INT_MAX;
+
+ for (AVCaptureDeviceFormat *format in formats) {
+ CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+ if (diff < currentDiff) {
+ selectedFormat = format;
+ currentDiff = diff;
+ } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+ selectedFormat = format;
+ }
+ }
+
+ return selectedFormat;
+}
+
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format {
+ Float64 maxSupportedFramerate = 0;
+ for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) {
+ maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+ }
+ return fmin(maxSupportedFramerate, kFramerateLimit);
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h
new file mode 100644
index 0000000000..7c32c4b509
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/base/RTCVideoCapturer.h"
+
+@protocol ARDExternalSampleDelegate <NSObject>
+- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+@end
+
+@interface ARDExternalSampleCapturer : RTC_OBJC_TYPE
+(RTCVideoCapturer)<ARDExternalSampleDelegate> @end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m
new file mode 100644
index 0000000000..8bf6716ddb
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDExternalSampleCapturer.h"
+
+#import "sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h"
+#import "sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h"
+#import "sdk/objc/base/RTCI420Buffer.h"
+#import "sdk/objc/base/RTCMutableI420Buffer.h"
+#import "sdk/objc/base/RTCMutableYUVPlanarBuffer.h"
+#import "sdk/objc/base/RTCVideoFrameBuffer.h"
+#import "sdk/objc/base/RTCYUVPlanarBuffer.h"
+#import "sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+@implementation ARDExternalSampleCapturer
+
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+ return [super initWithDelegate:delegate];
+}
+
+#pragma mark - ARDExternalSampleDelegate
+
+- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ return;
+ }
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (pixelBuffer == nil) {
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ int64_t timeStampNs =
+ CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:timeStampNs];
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h
new file mode 100644
index 0000000000..0edf7083c0
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDJoinResponse.h"
+
+@interface ARDJoinResponse ()
+
+@property(nonatomic, assign) ARDJoinResultType result;
+@property(nonatomic, assign) BOOL isInitiator;
+@property(nonatomic, strong) NSString* roomId;
+@property(nonatomic, strong) NSString* clientId;
+@property(nonatomic, strong) NSArray* messages;
+@property(nonatomic, strong) NSURL* webSocketURL;
+@property(nonatomic, strong) NSURL* webSocketRestURL;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h
new file mode 100644
index 0000000000..2911202af1
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, ARDJoinResultType) {
+ kARDJoinResultTypeUnknown,
+ kARDJoinResultTypeSuccess,
+ kARDJoinResultTypeFull
+};
+
+// Result of joining a room on the room server.
+@interface ARDJoinResponse : NSObject
+
+@property(nonatomic, readonly) ARDJoinResultType result;
+@property(nonatomic, readonly) BOOL isInitiator;
+@property(nonatomic, readonly) NSString *roomId;
+@property(nonatomic, readonly) NSString *clientId;
+@property(nonatomic, readonly) NSArray *messages;
+@property(nonatomic, readonly) NSURL *webSocketURL;
+@property(nonatomic, readonly) NSURL *webSocketRestURL;
+
++ (ARDJoinResponse *)responseFromJSONData:(NSData *)data;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m
new file mode 100644
index 0000000000..87d58e0db1
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDJoinResponse+Internal.h"
+
+#import "ARDSignalingMessage.h"
+#import "ARDUtilities.h"
+#import "RTCIceServer+JSON.h"
+
+static NSString const *kARDJoinResultKey = @"result";
+static NSString const *kARDJoinResultParamsKey = @"params";
+static NSString const *kARDJoinInitiatorKey = @"is_initiator";
+static NSString const *kARDJoinRoomIdKey = @"room_id";
+static NSString const *kARDJoinClientIdKey = @"client_id";
+static NSString const *kARDJoinMessagesKey = @"messages";
+static NSString const *kARDJoinWebSocketURLKey = @"wss_url";
+static NSString const *kARDJoinWebSocketRestURLKey = @"wss_post_url";
+
+@implementation ARDJoinResponse
+
+@synthesize result = _result;
+@synthesize isInitiator = _isInitiator;
+@synthesize roomId = _roomId;
+@synthesize clientId = _clientId;
+@synthesize messages = _messages;
+@synthesize webSocketURL = _webSocketURL;
+@synthesize webSocketRestURL = _webSocketRestURL;
+
++ (ARDJoinResponse *)responseFromJSONData:(NSData *)data {
+ NSDictionary *responseJSON = [NSDictionary dictionaryWithJSONData:data];
+ if (!responseJSON) {
+ return nil;
+ }
+ ARDJoinResponse *response = [[ARDJoinResponse alloc] init];
+ NSString *resultString = responseJSON[kARDJoinResultKey];
+ response.result = [[self class] resultTypeFromString:resultString];
+ NSDictionary *params = responseJSON[kARDJoinResultParamsKey];
+
+ response.isInitiator = [params[kARDJoinInitiatorKey] boolValue];
+ response.roomId = params[kARDJoinRoomIdKey];
+ response.clientId = params[kARDJoinClientIdKey];
+
+ // Parse messages.
+ NSArray *messages = params[kARDJoinMessagesKey];
+ NSMutableArray *signalingMessages =
+ [NSMutableArray arrayWithCapacity:messages.count];
+ for (NSString *message in messages) {
+ ARDSignalingMessage *signalingMessage =
+ [ARDSignalingMessage messageFromJSONString:message];
+ [signalingMessages addObject:signalingMessage];
+ }
+ response.messages = signalingMessages;
+
+ // Parse websocket urls.
+ NSString *webSocketURLString = params[kARDJoinWebSocketURLKey];
+ response.webSocketURL = [NSURL URLWithString:webSocketURLString];
+ NSString *webSocketRestURLString = params[kARDJoinWebSocketRestURLKey];
+ response.webSocketRestURL = [NSURL URLWithString:webSocketRestURLString];
+
+ return response;
+}
+
+#pragma mark - Private
+
++ (ARDJoinResultType)resultTypeFromString:(NSString *)resultString {
+ ARDJoinResultType result = kARDJoinResultTypeUnknown;
+ if ([resultString isEqualToString:@"SUCCESS"]) {
+ result = kARDJoinResultTypeSuccess;
+ } else if ([resultString isEqualToString:@"FULL"]) {
+ result = kARDJoinResultTypeFull;
+ }
+ return result;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h
new file mode 100644
index 0000000000..66ee76172f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMessageResponse.h"
+
+@interface ARDMessageResponse ()
+
+@property(nonatomic, assign) ARDMessageResultType result;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h
new file mode 100644
index 0000000000..65468cdf78
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, ARDMessageResultType) {
+ kARDMessageResultTypeUnknown,
+ kARDMessageResultTypeSuccess,
+ kARDMessageResultTypeInvalidRoom,
+ kARDMessageResultTypeInvalidClient
+};
+
+@interface ARDMessageResponse : NSObject
+
+@property(nonatomic, readonly) ARDMessageResultType result;
+
++ (ARDMessageResponse *)responseFromJSONData:(NSData *)data;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m
new file mode 100644
index 0000000000..0f5383f6d6
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMessageResponse+Internal.h"
+
+#import "ARDUtilities.h"
+
+static NSString const *kARDMessageResultKey = @"result";
+
+@implementation ARDMessageResponse
+
+@synthesize result = _result;
+
++ (ARDMessageResponse *)responseFromJSONData:(NSData *)data {
+ NSDictionary *responseJSON = [NSDictionary dictionaryWithJSONData:data];
+ if (!responseJSON) {
+ return nil;
+ }
+ ARDMessageResponse *response = [[ARDMessageResponse alloc] init];
+ response.result =
+ [[self class] resultTypeFromString:responseJSON[kARDMessageResultKey]];
+ return response;
+}
+
+#pragma mark - Private
+
++ (ARDMessageResultType)resultTypeFromString:(NSString *)resultString {
+ ARDMessageResultType result = kARDMessageResultTypeUnknown;
+ if ([resultString isEqualToString:@"SUCCESS"]) {
+ result = kARDMessageResultTypeSuccess;
+ } else if ([resultString isEqualToString:@"INVALID_CLIENT"]) {
+ result = kARDMessageResultTypeInvalidClient;
+ } else if ([resultString isEqualToString:@"INVALID_ROOM"]) {
+ result = kARDMessageResultTypeInvalidRoom;
+ }
+ return result;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h
new file mode 100644
index 0000000000..3a5818d6d6
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class ARDJoinResponse;
+@class ARDMessageResponse;
+@class ARDSignalingMessage;
+
+@protocol ARDRoomServerClient <NSObject>
+
+- (void)joinRoomWithRoomId:(NSString *)roomId
+ isLoopback:(BOOL)isLoopback
+ completionHandler:(void (^)(ARDJoinResponse *response, NSError *error))completionHandler;
+
+- (void)sendMessage:(ARDSignalingMessage *)message
+ forRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(ARDMessageResponse *response, NSError *error))completionHandler;
+
+- (void)leaveRoomWithRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(NSError *error))completionHandler;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h
new file mode 100644
index 0000000000..dc3f24ced8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "ARDSettingsModel.h"
+
+@class ARDSettingsStore;
+
+NS_ASSUME_NONNULL_BEGIN
+@interface ARDSettingsModel ()
+- (ARDSettingsStore *)settingsStore;
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h
new file mode 100644
index 0000000000..47c7defacd
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/base/RTCVideoCodecInfo.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Model class for user defined settings.
+ *
+ * Handles storing the settings and provides default values if setting is not
+ * set. Also provides list of available options for different settings. Stores
+ * for example video codec, video resolution and maximum bitrate.
+ */
+@interface ARDSettingsModel : NSObject
+
+/**
+ * Returns array of available capture resolutions.
+ *
+ * The capture resolutions are represented as strings in the following format
+ * [width]x[height]
+ */
+- (NSArray<NSString *> *)availableVideoResolutions;
+
+/**
+ * Returns current video resolution string.
+ * If no resolution is in store, default value of 640x480 is returned.
+ * When defaulting to value, the default is saved in store for consistency reasons.
+ */
+- (NSString *)currentVideoResolutionSettingFromStore;
+- (int)currentVideoResolutionWidthFromStore;
+- (int)currentVideoResolutionHeightFromStore;
+
+/**
+ * Stores the provided video resolution string into the store.
+ *
+ * If the provided resolution is not part of the available video resolutions
+ * the store operation will not be executed and NO will be returned.
+ * @param resolution the string to be stored.
+ * @return YES/NO depending on success.
+ */
+- (BOOL)storeVideoResolutionSetting:(NSString *)resolution;
+
+/**
+ * Returns array of available video codecs.
+ */
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs;
+
+/**
+ * Returns current video codec setting from store if present or default (H264) otherwise.
+ */
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore;
+
+/**
+ * Stores the provided video codec setting into the store.
+ *
+ * If the provided video codec is not part of the available video codecs
+ * the store operation will not be executed and NO will be returned.
+ * @param videoCodec the video codec setting to be stored.
+ * @return YES/NO depending on success.
+ */
+- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec;
+
+/**
+ * Returns current max bitrate setting from store if present.
+ */
+- (nullable NSNumber *)currentMaxBitrateSettingFromStore;
+
+/**
+ * Stores the provided bitrate value into the store.
+ *
+ * @param bitrate NSNumber representation of the max bitrate value.
+ */
+- (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate;
+
+/**
+ * Returns current audio only setting from store if present or default (NO) otherwise.
+ */
+- (BOOL)currentAudioOnlySettingFromStore;
+
+/**
+ * Stores the provided audio only setting into the store.
+ *
+ * @param setting the boolean value to be stored.
+ */
+- (void)storeAudioOnlySetting:(BOOL)audioOnly;
+
+/**
+ * Returns current create AecDump setting from store if present or default (NO) otherwise.
+ */
+- (BOOL)currentCreateAecDumpSettingFromStore;
+
+/**
+ * Stores the provided create AecDump setting into the store.
+ *
+ * @param createAecDump the boolean value to be stored.
+ */
+- (void)storeCreateAecDumpSetting:(BOOL)createAecDump;
+
+/**
+ * Returns current setting whether to use manual audio config from store if present or default (YES)
+ * otherwise.
+ */
+- (BOOL)currentUseManualAudioConfigSettingFromStore;
+
+/**
+ * Stores the provided use manual audio config setting into the store.
+ *
+ * @param useManualAudioConfig the boolean value to be stored.
+ */
+- (void)storeUseManualAudioConfigSetting:(BOOL)useManualAudioConfig;
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m
new file mode 100644
index 0000000000..9e709b0553
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSettingsModel+Private.h"
+#import "ARDSettingsStore.h"
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Class extension holding the lazily-created backing store (see -settingsStore).
+@interface ARDSettingsModel () {
+  ARDSettingsStore *_settingsStore;
+}
+@end
+
+@implementation ARDSettingsModel
+
+// Enumerates every capture device and format, collecting the set of supported
+// resolutions, then returns them as "<width>x<height>" strings sorted by
+// width, then height.
+- (NSArray<NSString *> *)availableVideoResolutions {
+  NSMutableSet<NSArray<NSNumber *> *> *resolutions =
+      [[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
+  for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
+    for (AVCaptureDeviceFormat *format in
+         [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
+      CMVideoDimensions resolution =
+          CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+      NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];
+      [resolutions addObject:resolutionObject];
+    }
+  }
+
+  // Sort by width first, falling back to height for equal widths.
+  NSArray<NSArray<NSNumber *> *> *sortedResolutions =
+      [[resolutions allObjects] sortedArrayUsingComparator:^NSComparisonResult(
+                                    NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
+        NSComparisonResult cmp = [obj1.firstObject compare:obj2.firstObject];
+        if (cmp != NSOrderedSame) {
+          return cmp;
+        }
+        return [obj1.lastObject compare:obj2.lastObject];
+      }];
+
+  NSMutableArray<NSString *> *resolutionStrings = [[NSMutableArray<NSString *> alloc] init];
+  for (NSArray<NSNumber *> *resolution in sortedResolutions) {
+    NSString *resolutionString =
+        [NSString stringWithFormat:@"%@x%@", resolution.firstObject, resolution.lastObject];
+    [resolutionStrings addObject:resolutionString];
+  }
+
+  return [resolutionStrings copy];
+}
+
+// Returns the persisted video resolution, registering defaults first so a
+// fresh install reads a valid value.
+- (NSString *)currentVideoResolutionSettingFromStore {
+  [self registerStoreDefaults];
+  return [[self settingsStore] videoResolution];
+}
+
+// Persists `resolution` only if it is one of the currently available
+// resolutions; returns NO otherwise.
+- (BOOL)storeVideoResolutionSetting:(NSString *)resolution {
+  if (![[self availableVideoResolutions] containsObject:resolution]) {
+    return NO;
+  }
+  [[self settingsStore] setVideoResolution:resolution];
+  return YES;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs {
+  return [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs];
+}
+
+// Unarchives the persisted codec info. Uses the secure NSKeyedUnarchiver API
+// where available (iOS / macOS 10.13+), the legacy API otherwise.
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore {
+  [self registerStoreDefaults];
+  NSData *codecData = [[self settingsStore] videoCodec];
+#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
+  Class expectedClass = [RTC_OBJC_TYPE(RTCVideoCodecInfo) class];
+  NSError *error;
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodecSetting =
+      [NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass fromData:codecData error:&error];
+  if (!error) {
+    return videoCodecSetting;
+  }
+  return nil;
+#else
+  return [NSKeyedUnarchiver unarchiveObjectWithData:codecData];
+#endif
+}
+
+// Archives and persists `videoCodec` if it is one of the available codecs;
+// returns NO if the codec is unknown or archiving fails.
+- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec {
+  if (![[self availableVideoCodecs] containsObject:videoCodec]) {
+    return NO;
+  }
+
+#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
+  NSError *error;
+  NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:videoCodec
+                                            requiringSecureCoding:NO
+                                                            error:&error];
+  if (error) {
+    return NO;
+  }
+#else
+  NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:videoCodec];
+#endif
+
+  [[self settingsStore] setVideoCodec:codecData];
+  return YES;
+}
+
+- (nullable NSNumber *)currentMaxBitrateSettingFromStore {
+  [self registerStoreDefaults];
+  return [[self settingsStore] maxBitrate];
+}
+
+- (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate {
+  [[self settingsStore] setMaxBitrate:bitrate];
+}
+
+- (BOOL)currentAudioOnlySettingFromStore {
+  return [[self settingsStore] audioOnly];
+}
+
+- (void)storeAudioOnlySetting:(BOOL)audioOnly {
+  [[self settingsStore] setAudioOnly:audioOnly];
+}
+
+- (BOOL)currentCreateAecDumpSettingFromStore {
+  return [[self settingsStore] createAecDump];
+}
+
+- (void)storeCreateAecDumpSetting:(BOOL)createAecDump {
+  [[self settingsStore] setCreateAecDump:createAecDump];
+}
+
+- (BOOL)currentUseManualAudioConfigSettingFromStore {
+  return [[self settingsStore] useManualAudioConfig];
+}
+
+- (void)storeUseManualAudioConfigSetting:(BOOL)useManualAudioConfig {
+  [[self settingsStore] setUseManualAudioConfig:useManualAudioConfig];
+}
+
+#pragma mark - Testable
+
+// Lazily creates the backing store and registers defaults on first use.
+- (ARDSettingsStore *)settingsStore {
+  if (!_settingsStore) {
+    _settingsStore = [[ARDSettingsStore alloc] init];
+    [self registerStoreDefaults];
+  }
+  return _settingsStore;
+}
+
+- (int)currentVideoResolutionWidthFromStore {
+  NSString *resolution = [self currentVideoResolutionSettingFromStore];
+
+  return [self videoResolutionComponentAtIndex:0 inString:resolution];
+}
+
+- (int)currentVideoResolutionHeightFromStore {
+  NSString *resolution = [self currentVideoResolutionSettingFromStore];
+  return [self videoResolutionComponentAtIndex:1 inString:resolution];
+}
+
+#pragma mark -
+
+- (NSString *)defaultVideoResolutionSetting {
+  return [self availableVideoResolutions].firstObject;
+}
+
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)defaultVideoCodecSetting {
+  return [self availableVideoCodecs].firstObject;
+}
+
+// Parses "<width>x<height>"; index 0 selects the width, 1 the height.
+// Returns 0 for any other index or for a malformed resolution string.
+- (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolution {
+  if (index != 0 && index != 1) {
+    return 0;
+  }
+  NSArray<NSString *> *components = [resolution componentsSeparatedByString:@"x"];
+  if (components.count != 2) {
+    return 0;
+  }
+  return components[index].intValue;
+}
+
+// Registers fallback defaults (first available resolution/codec, audio-only
+// and AecDump off, manual audio config on) with the settings store. Bails out
+// silently if the default codec cannot be archived.
+- (void)registerStoreDefaults {
+#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
+  NSError *error;
+  NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]
+                                            requiringSecureCoding:NO
+                                                            error:&error];
+  if (error) {
+    return;
+  }
+#else
+  NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]];
+#endif
+
+  [ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
+                                       videoCodec:codecData
+                                          bitrate:nil
+                                        audioOnly:NO
+                                    createAecDump:NO
+                             useManualAudioConfig:YES];
+}
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h
new file mode 100644
index 0000000000..bb051dbb26
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Light-weight persistent store for user settings.
+ *
+ * It will persist between application launches and application updates.
+ */
+@interface ARDSettingsStore : NSObject
+
+/**
+ * Registers fallback values used when a setting has not yet been written by
+ * the user.
+ *
+ * @param videoResolution default "WxH" resolution string.
+ * @param videoCodec default archived video codec info.
+ * @param bitrate default max bitrate, or nil for no default.
+ * @param audioOnly default audio-only flag.
+ * @param createAecDump default AecDump-creation flag.
+ * @param useManualAudioConfig default manual-audio-config flag.
+ */
++ (void)setDefaultsForVideoResolution:(NSString *)videoResolution
+                           videoCodec:(NSData *)videoCodec
+                              bitrate:(nullable NSNumber *)bitrate
+                            audioOnly:(BOOL)audioOnly
+                        createAecDump:(BOOL)createAecDump
+                 useManualAudioConfig:(BOOL)useManualAudioConfig;
+
+// Persisted video resolution as a "WxH" string.
+@property(nonatomic) NSString *videoResolution;
+// Persisted video codec info as NSData produced by NSKeyedArchiver.
+@property(nonatomic) NSData *videoCodec;
+
+/**
+ * Returns current max bitrate number stored in the store.
+ */
+- (nullable NSNumber *)maxBitrate;
+
+/**
+ * Stores the provided value as maximum bitrate setting.
+ * @param value the number to be stored
+ */
+- (void)setMaxBitrate:(nullable NSNumber *)value;
+
+@property(nonatomic) BOOL audioOnly;
+@property(nonatomic) BOOL createAecDump;
+@property(nonatomic) BOOL useManualAudioConfig;
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m
new file mode 100644
index 0000000000..a3713e2f0e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSettingsStore.h"
+
+static NSString *const kVideoResolutionKey = @"rtc_video_resolution_key";
+static NSString *const kVideoCodecKey = @"rtc_video_codec_info_key";
+static NSString *const kBitrateKey = @"rtc_max_bitrate_key";
+static NSString *const kAudioOnlyKey = @"rtc_audio_only_key";
+static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key";
+static NSString *const kUseManualAudioConfigKey = @"rtc_use_manual_audio_config_key";
+
+NS_ASSUME_NONNULL_BEGIN
+// Class extension backing the lazily-resolved `storage` user-defaults handle.
+@interface ARDSettingsStore () {
+  NSUserDefaults *_storage;
+}
+@property(nonatomic, strong, readonly) NSUserDefaults *storage;
+@end
+
+@implementation ARDSettingsStore
+
+// Registers fallback values with NSUserDefaults. Registered defaults are
+// returned by reads until a real value is written; they are not themselves
+// persisted to disk.
++ (void)setDefaultsForVideoResolution:(NSString *)videoResolution
+                           videoCodec:(NSData *)videoCodec
+                              bitrate:(nullable NSNumber *)bitrate
+                            audioOnly:(BOOL)audioOnly
+                        createAecDump:(BOOL)createAecDump
+                 useManualAudioConfig:(BOOL)useManualAudioConfig {
+  NSMutableDictionary<NSString *, id> *defaultsDictionary = [@{
+    kAudioOnlyKey : @(audioOnly),
+    kCreateAecDumpKey : @(createAecDump),
+    kUseManualAudioConfigKey : @(useManualAudioConfig)
+  } mutableCopy];
+
+  // Optional values are only registered when present; a dictionary cannot
+  // hold nil.
+  if (videoResolution) {
+    defaultsDictionary[kVideoResolutionKey] = videoResolution;
+  }
+  if (videoCodec) {
+    defaultsDictionary[kVideoCodecKey] = videoCodec;
+  }
+  if (bitrate) {
+    defaultsDictionary[kBitrateKey] = bitrate;
+  }
+  [[NSUserDefaults standardUserDefaults] registerDefaults:defaultsDictionary];
+}
+
+// Lazily resolves the standard user defaults instance.
+- (NSUserDefaults *)storage {
+  if (!_storage) {
+    _storage = [NSUserDefaults standardUserDefaults];
+  }
+  return _storage;
+}
+
+- (NSString *)videoResolution {
+  return [self.storage objectForKey:kVideoResolutionKey];
+}
+
+// All setters below call -synchronize to flush the value immediately.
+- (void)setVideoResolution:(NSString *)resolution {
+  [self.storage setObject:resolution forKey:kVideoResolutionKey];
+  [self.storage synchronize];
+}
+
+- (NSData *)videoCodec {
+  return [self.storage objectForKey:kVideoCodecKey];
+}
+
+- (void)setVideoCodec:(NSData *)videoCodec {
+  [self.storage setObject:videoCodec forKey:kVideoCodecKey];
+  [self.storage synchronize];
+}
+
+- (nullable NSNumber *)maxBitrate {
+  return [self.storage objectForKey:kBitrateKey];
+}
+
+- (void)setMaxBitrate:(nullable NSNumber *)value {
+  [self.storage setObject:value forKey:kBitrateKey];
+  [self.storage synchronize];
+}
+
+- (BOOL)audioOnly {
+  return [self.storage boolForKey:kAudioOnlyKey];
+}
+
+- (void)setAudioOnly:(BOOL)audioOnly {
+  [self.storage setBool:audioOnly forKey:kAudioOnlyKey];
+  [self.storage synchronize];
+}
+
+- (BOOL)createAecDump {
+  return [self.storage boolForKey:kCreateAecDumpKey];
+}
+
+- (void)setCreateAecDump:(BOOL)createAecDump {
+  [self.storage setBool:createAecDump forKey:kCreateAecDumpKey];
+  [self.storage synchronize];
+}
+
+- (BOOL)useManualAudioConfig {
+  return [self.storage boolForKey:kUseManualAudioConfigKey];
+}
+
+- (void)setUseManualAudioConfig:(BOOL)useManualAudioConfig {
+  [self.storage setBool:useManualAudioConfig forKey:kUseManualAudioConfigKey];
+  [self.storage synchronize];
+}
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h
new file mode 100644
index 0000000000..396b117b17
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "ARDSignalingMessage.h"
+
+typedef NS_ENUM(NSInteger, ARDSignalingChannelState) {
+  // State when disconnected.
+  kARDSignalingChannelStateClosed,
+  // State when connection is established but not ready for use.
+  kARDSignalingChannelStateOpen,
+  // State when connection is established and registered.
+  kARDSignalingChannelStateRegistered,
+  // State when connection encounters a fatal error.
+  kARDSignalingChannelStateError
+};
+
+@protocol ARDSignalingChannel;
+// Callbacks for channel state transitions and incoming signaling messages.
+@protocol ARDSignalingChannelDelegate <NSObject>
+
+// Called whenever the channel transitions between the states above.
+- (void)channel:(id<ARDSignalingChannel>)channel didChangeState:(ARDSignalingChannelState)state;
+
+// Called for each signaling message received over the channel.
+- (void)channel:(id<ARDSignalingChannel>)channel didReceiveMessage:(ARDSignalingMessage *)message;
+
+@end
+
+// Abstraction over a bidirectional signaling transport.
+@protocol ARDSignalingChannel <NSObject>
+
+@property(nonatomic, readonly) NSString *roomId;
+@property(nonatomic, readonly) NSString *clientId;
+@property(nonatomic, readonly) ARDSignalingChannelState state;
+@property(nonatomic, weak) id<ARDSignalingChannelDelegate> delegate;
+
+// Registers the channel for the given room and client id.
+- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId;
+
+// Sends signaling message over the channel.
+- (void)sendMessage:(ARDSignalingMessage *)message;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h
new file mode 100644
index 0000000000..ac19e8fba7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCIceCandidate.h"
+#import "sdk/objc/api/peerconnection/RTCSessionDescription.h"
+
+// Wire types for AppRTC signaling messages.
+// NS_ENUM gives the enum a fixed underlying type and matches the style of
+// ARDSignalingChannelState in ARDSignalingChannel.h.
+typedef NS_ENUM(NSInteger, ARDSignalingMessageType) {
+  kARDSignalingMessageTypeCandidate,
+  kARDSignalingMessageTypeCandidateRemoval,
+  kARDSignalingMessageTypeOffer,
+  kARDSignalingMessageTypeAnswer,
+  kARDSignalingMessageTypeBye,
+};
+
+// Abstract base class for signaling messages; use +messageFromJSONString: to
+// decode and -JSONData to encode.
+@interface ARDSignalingMessage : NSObject
+
+@property(nonatomic, readonly) ARDSignalingMessageType type;
+
+// Parses `jsonString` and returns the matching concrete subclass, or nil on
+// unparsable JSON or an unknown type.
++ (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString;
+// Serialized JSON payload for sending over the signaling channel.
+- (NSData *)JSONData;
+
+@end
+
+// Carries a single ICE candidate.
+@interface ARDICECandidateMessage : ARDSignalingMessage
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceCandidate) * candidate;
+
+- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
+
+@end
+
+// Carries a batch of removed ICE candidates.
+@interface ARDICECandidateRemovalMessage : ARDSignalingMessage
+
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates;
+
+- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+
+@end
+
+// Carries an SDP offer or answer.
+@interface ARDSessionDescriptionMessage : ARDSignalingMessage
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription;
+
+- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description;
+
+@end
+
+// Signals that the remote side is leaving the call.
+@interface ARDByeMessage : ARDSignalingMessage
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m
new file mode 100644
index 0000000000..049c0f5b0a
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSignalingMessage.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDUtilities.h"
+#import "RTCIceCandidate+JSON.h"
+#import "RTCSessionDescription+JSON.h"
+
+static NSString * const kARDSignalingMessageTypeKey = @"type";
+static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
+
+@implementation ARDSignalingMessage
+
+@synthesize type = _type;
+
+// Designated initializer for subclasses; tags the message with its wire type.
+- (instancetype)initWithType:(ARDSignalingMessageType)type {
+  if (self = [super init]) {
+    _type = type;
+  }
+  return self;
+}
+
+// Debug description: the UTF-8 decoded JSON payload.
+- (NSString *)description {
+  return [[NSString alloc] initWithData:[self JSONData]
+                               encoding:NSUTF8StringEncoding];
+}
+
+// Factory: parses `jsonString` and instantiates the matching concrete
+// subclass. Returns nil when the JSON cannot be parsed or the "type" field is
+// unknown (both cases are logged).
++ (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString {
+  NSDictionary *values = [NSDictionary dictionaryWithJSONString:jsonString];
+  if (!values) {
+    RTCLogError(@"Error parsing signaling message JSON.");
+    return nil;
+  }
+
+  NSString *typeString = values[kARDSignalingMessageTypeKey];
+  ARDSignalingMessage *message = nil;
+  if ([typeString isEqualToString:@"candidate"]) {
+    RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+        [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:values];
+    message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
+  } else if ([typeString isEqualToString:kARDTypeValueRemoveCandidates]) {
+    RTCLogInfo(@"Received remove-candidates message");
+    NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
+        [RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values];
+    message = [[ARDICECandidateRemovalMessage alloc]
+        initWithRemovedCandidates:candidates];
+  } else if ([typeString isEqualToString:@"offer"] ||
+             [typeString isEqualToString:@"answer"]) {
+    RTC_OBJC_TYPE(RTCSessionDescription) *description =
+        [RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values];
+    message =
+        [[ARDSessionDescriptionMessage alloc] initWithDescription:description];
+  } else if ([typeString isEqualToString:@"bye"]) {
+    message = [[ARDByeMessage alloc] init];
+  } else {
+    RTCLogError(@"Unexpected type: %@", typeString);
+  }
+  return message;
+}
+
+// Base class carries no payload; concrete subclasses override.
+- (NSData *)JSONData {
+  return nil;
+}
+
+@end
+
+@implementation ARDICECandidateMessage
+
+@synthesize candidate = _candidate;
+
+- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
+  if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) {
+    _candidate = candidate;
+  }
+  return self;
+}
+
+// Serialization is delegated to the candidate's JSON category.
+- (NSData *)JSONData {
+  return [_candidate JSONData];
+}
+
+@end
+
+@implementation ARDICECandidateRemovalMessage
+
+@synthesize candidates = _candidates;
+
+// `candidates` must be non-empty (asserted).
+- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
+  NSParameterAssert(candidates.count);
+  if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) {
+    _candidates = candidates;
+  }
+  return self;
+}
+
+// Serializes the batch with the "remove-candidates" wire type.
+- (NSData *)JSONData {
+  return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates
+                                                         withType:kARDTypeValueRemoveCandidates];
+}
+
+@end
+
+@implementation ARDSessionDescriptionMessage
+
+@synthesize sessionDescription = _sessionDescription;
+
+// Maps the SDP type to the matching signaling message type; pranswer and
+// rollback are unexpected here and trip an assertion (debug builds).
+- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
+  ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
+  RTCSdpType sdpType = description.type;
+  switch (sdpType) {
+    case RTCSdpTypeOffer:
+      messageType = kARDSignalingMessageTypeOffer;
+      break;
+    case RTCSdpTypeAnswer:
+      messageType = kARDSignalingMessageTypeAnswer;
+      break;
+    case RTCSdpTypePrAnswer:
+    case RTCSdpTypeRollback:
+      NSAssert(
+          NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
+      break;
+  }
+  if (self = [super initWithType:messageType]) {
+    _sessionDescription = description;
+  }
+  return self;
+}
+
+- (NSData *)JSONData {
+  return [_sessionDescription JSONData];
+}
+
+@end
+
+@implementation ARDByeMessage
+
+- (instancetype)init {
+  return [super initWithType:kARDSignalingMessageTypeBye];
+}
+
+// Fixed payload: {"type": "bye"}.
+- (NSData *)JSONData {
+  NSDictionary *message = @{
+    @"type": @"bye"
+  };
+  return [NSJSONSerialization dataWithJSONObject:message
+                                         options:NSJSONWritingPrettyPrinted
+                                           error:NULL];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h
new file mode 100644
index 0000000000..eaffa67049
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCStatisticsReport.h"
+#import "sdk/objc/base/RTCMacros.h"
+
+/** Class used to accumulate stats information into a single displayable string.
+ */
+@interface ARDStatsBuilder : NSObject
+
+/** String that represents the accumulated stats reports passed into this
+ * class.
+ */
+@property(nonatomic, readonly) NSString *statsString;
+/** Most recent stats report; statsString is rebuilt from it on each read. */
+@property(nonatomic) RTC_OBJC_TYPE(RTCStatisticsReport) * stats;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m
new file mode 100644
index 0000000000..7ebf9fb1c7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDStatsBuilder.h"
+
+#import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h"
+#import "sdk/objc/base/RTCMacros.h"
+
+#import "ARDUtilities.h"
+
+@implementation ARDStatsBuilder
+
+@synthesize stats = _stats;
+
+// Builds a multi-line display string: current process CPU usage followed by
+// the description of every RTCStatistics entry in the last report.
+- (NSString *)statsString {
+  NSMutableString *result = [NSMutableString string];
+
+  [result appendFormat:@"(cpu)%ld%%\n", (long)ARDGetCpuUsagePercentage()];
+
+  for (NSString *key in _stats.statistics) {
+    RTC_OBJC_TYPE(RTCStatistics) *stat = _stats.statistics[key];
+    [result appendFormat:@"%@\n", stat.description];
+  }
+
+  return result;
+}
+
+@end
+
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h
new file mode 100644
index 0000000000..3a579f8f7f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDTURNClient.h"
+
+// Concrete ARDTURNClient that fetches TURN servers from an AppRTC endpoint.
+@interface ARDTURNClient : NSObject <ARDTURNClient>
+
+// `url` is the endpoint that returns the ICE server request URL.
+- (instancetype)initWithURL:(NSURL *)url;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h
new file mode 100644
index 0000000000..0399736f03
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCIceServer);
+
+@protocol ARDTURNClient <NSObject>
+
+// Returns TURN server urls if successful.
+// On failure `turnServers` is nil and `error` describes what went wrong.
+- (void)requestServersWithCompletionHandler:(void (^)(NSArray *turnServers,
+                                                      NSError *error))completionHandler;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m
new file mode 100644
index 0000000000..069231cd7e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDTURNClient+Internal.h"
+
+#import "ARDUtilities.h"
+#import "RTCIceServer+JSON.h"
+
+// TODO(tkchin): move this to a configuration object.
+static NSString *kTURNRefererURLString = @"https://appr.tc";
+static NSString *kARDTURNClientErrorDomain = @"ARDTURNClient";
+static NSInteger kARDTURNClientErrorBadResponse = -1;
+
+@implementation ARDTURNClient {
+  NSURL *_url;
+}
+
+// `url` is the AppRTC endpoint that returns the ICE-server-request URL
+// ("ice_server_url") used to fetch TURN credentials.
+- (instancetype)initWithURL:(NSURL *)url {
+  NSParameterAssert([url absoluteString].length);
+  if (self = [super init]) {
+    _url = url;
+  }
+  return self;
+}
+
+// Two-step TURN lookup: fetch `_url` to discover the ICE server request URL,
+// then POST to that URL to obtain the actual server list.
+- (void)requestServersWithCompletionHandler:
+    (void (^)(NSArray *turnServers, NSError *error))completionHandler {
+
+  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url];
+  [NSURLConnection sendAsyncRequest:request
+                  completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
+                    if (error) {
+                      completionHandler(nil, error);
+                      return;
+                    }
+                    NSDictionary *responseDict = [NSDictionary dictionaryWithJSONData:data];
+                    NSString *iceServerUrlString = responseDict[@"ice_server_url"];
+                    NSURL *iceServerUrl =
+                        iceServerUrlString.length ? [NSURL URLWithString:iceServerUrlString] : nil;
+                    if (!iceServerUrl) {
+                      // A missing or invalid "ice_server_url" previously produced a nil
+                      // request URL downstream; surface it as a bad response instead.
+                      completionHandler(nil, [ARDTURNClient badResponseError]);
+                      return;
+                    }
+                    [self makeTurnServerRequestToURL:iceServerUrl
+                               WithCompletionHandler:completionHandler];
+                  }];
+}
+
+#pragma mark - Private
+
+// Shared error for structurally invalid server responses.
++ (NSError *)badResponseError {
+  return [[NSError alloc] initWithDomain:kARDTURNClientErrorDomain
+                                    code:kARDTURNClientErrorBadResponse
+                                userInfo:@{
+                                  NSLocalizedDescriptionKey: @"Bad TURN response.",
+                                }];
+}
+
+// POSTs to the ICE server endpoint and decodes the "iceServers" array into
+// RTCIceServer objects.
+- (void)makeTurnServerRequestToURL:(NSURL *)url
+             WithCompletionHandler:(void (^)(NSArray *turnServers,
+                                             NSError *error))completionHandler {
+  NSMutableURLRequest *iceServerRequest = [NSMutableURLRequest requestWithURL:url];
+  iceServerRequest.HTTPMethod = @"POST";
+  [iceServerRequest addValue:kTURNRefererURLString forHTTPHeaderField:@"referer"];
+  [NSURLConnection sendAsyncRequest:iceServerRequest
+                  completionHandler:^(NSURLResponse *response,
+                                      NSData *data,
+                                      NSError *error) {
+                    if (error) {
+                      completionHandler(nil, error);
+                      return;
+                    }
+                    NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
+                    id iceServerDicts = turnResponseDict[@"iceServers"];
+                    // The old `if (!turnServers)` check was dead code: +[NSMutableArray array]
+                    // never returns nil, so a malformed body never reported
+                    // kARDTURNClientErrorBadResponse. Validate the decoded payload instead.
+                    if (![iceServerDicts isKindOfClass:[NSArray class]]) {
+                      completionHandler(nil, [ARDTURNClient badResponseError]);
+                      return;
+                    }
+                    NSMutableArray *turnServers = [NSMutableArray array];
+                    [iceServerDicts
+                        enumerateObjectsUsingBlock:^(NSDictionary *obj, NSUInteger idx, BOOL *stop) {
+                          [turnServers
+                              addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]];
+                        }];
+                    completionHandler(turnServers, nil);
+                  }];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h
new file mode 100644
index 0000000000..81888e6e83
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "ARDSignalingChannel.h"
+
+// Wraps a WebSocket connection to the AppRTC WebSocket server.
+@interface ARDWebSocketChannel : NSObject <ARDSignalingChannel>
+
+// `url` is the WebSocket endpoint; `restURL` is the REST endpoint used by
+// -sendMessage: when the channel is not yet registered.
+- (instancetype)initWithURL:(NSURL *)url
+                    restURL:(NSURL *)restURL
+                   delegate:(id<ARDSignalingChannelDelegate>)delegate;
+
+// Registers with the WebSocket server for the given room and client id once
+// the web socket connection is open.
+- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId;
+
+// Sends message over the WebSocket connection if registered, otherwise POSTs to
+// the web socket server instead.
+- (void)sendMessage:(ARDSignalingMessage *)message;
+
+@end
+
+// Loopback mode is used to cause the client to connect to itself for testing.
+// A second web socket connection is established simulating the other client.
+// Any messages received are sent back to the WebSocket server after modifying
+// them as appropriate.
+@interface ARDLoopbackWebSocketChannel : ARDWebSocketChannel
+
+- (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m
new file mode 100644
index 0000000000..bbb0bf87f8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDWebSocketChannel.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "SRWebSocket.h"
+
+#import "ARDSignalingMessage.h"
+#import "ARDUtilities.h"
+
+// TODO(tkchin): move these to a configuration object.
+static NSString const *kARDWSSMessageErrorKey = @"error";
+static NSString const *kARDWSSMessagePayloadKey = @"msg";
+
+@interface ARDWebSocketChannel () <SRWebSocketDelegate>
+@end
+
+@implementation ARDWebSocketChannel {
+ NSURL *_url;
+ NSURL *_restURL;
+ SRWebSocket *_socket;
+}
+
+@synthesize delegate = _delegate;
+@synthesize state = _state;
+@synthesize roomId = _roomId;
+@synthesize clientId = _clientId;
+
+- (instancetype)initWithURL:(NSURL *)url
+ restURL:(NSURL *)restURL
+ delegate:(id<ARDSignalingChannelDelegate>)delegate {
+ if (self = [super init]) {
+ _url = url;
+ _restURL = restURL;
+ _delegate = delegate;
+ _socket = [[SRWebSocket alloc] initWithURL:url];
+ _socket.delegate = self;
+ RTCLog(@"Opening WebSocket.");
+ [_socket open];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self disconnect];
+}
+
+- (void)setState:(ARDSignalingChannelState)state {
+ if (_state == state) {
+ return;
+ }
+ _state = state;
+ [_delegate channel:self didChangeState:_state];
+}
+
+- (void)registerForRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId {
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(clientId.length);
+ _roomId = roomId;
+ _clientId = clientId;
+ if (_state == kARDSignalingChannelStateOpen) {
+ [self registerWithCollider];
+ }
+}
+
+- (void)sendMessage:(ARDSignalingMessage *)message {
+ NSParameterAssert(_clientId.length);
+ NSParameterAssert(_roomId.length);
+ NSData *data = [message JSONData];
+ if (_state == kARDSignalingChannelStateRegistered) {
+ NSString *payload =
+ [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
+ NSDictionary *message = @{
+ @"cmd": @"send",
+ @"msg": payload,
+ };
+ NSData *messageJSONObject =
+ [NSJSONSerialization dataWithJSONObject:message
+ options:NSJSONWritingPrettyPrinted
+ error:nil];
+ NSString *messageString =
+ [[NSString alloc] initWithData:messageJSONObject
+ encoding:NSUTF8StringEncoding];
+ RTCLog(@"C->WSS: %@", messageString);
+ [_socket send:messageString];
+ } else {
+ NSString *dataString =
+ [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
+ RTCLog(@"C->WSS POST: %@", dataString);
+ NSString *urlString =
+ [NSString stringWithFormat:@"%@/%@/%@",
+ [_restURL absoluteString], _roomId, _clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ [NSURLConnection sendAsyncPostToURL:url
+ withData:data
+ completionHandler:nil];
+ }
+}
+
+- (void)disconnect {
+ if (_state == kARDSignalingChannelStateClosed ||
+ _state == kARDSignalingChannelStateError) {
+ return;
+ }
+ [_socket close];
+ RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId);
+ NSString *urlString =
+ [NSString stringWithFormat:@"%@/%@/%@",
+ [_restURL absoluteString], _roomId, _clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"DELETE";
+ request.HTTPBody = nil;
+ [NSURLConnection sendAsyncRequest:request completionHandler:nil];
+}
+
+#pragma mark - SRWebSocketDelegate
+
+- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
+ RTCLog(@"WebSocket connection opened.");
+ self.state = kARDSignalingChannelStateOpen;
+ if (_roomId.length && _clientId.length) {
+ [self registerWithCollider];
+ }
+}
+
+- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
+ NSString *messageString = message;
+ NSData *messageData = [messageString dataUsingEncoding:NSUTF8StringEncoding];
+ id jsonObject = [NSJSONSerialization JSONObjectWithData:messageData
+ options:0
+ error:nil];
+ if (![jsonObject isKindOfClass:[NSDictionary class]]) {
+ RTCLogError(@"Unexpected message: %@", jsonObject);
+ return;
+ }
+ NSDictionary *wssMessage = jsonObject;
+ NSString *errorString = wssMessage[kARDWSSMessageErrorKey];
+ if (errorString.length) {
+ RTCLogError(@"WSS error: %@", errorString);
+ return;
+ }
+ NSString *payload = wssMessage[kARDWSSMessagePayloadKey];
+ ARDSignalingMessage *signalingMessage =
+ [ARDSignalingMessage messageFromJSONString:payload];
+ RTCLog(@"WSS->C: %@", payload);
+ [_delegate channel:self didReceiveMessage:signalingMessage];
+}
+
+- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
+ RTCLogError(@"WebSocket error: %@", error);
+ self.state = kARDSignalingChannelStateError;
+}
+
+- (void)webSocket:(SRWebSocket *)webSocket
+ didCloseWithCode:(NSInteger)code
+ reason:(NSString *)reason
+ wasClean:(BOOL)wasClean {
+ RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d",
+ (long)code, reason, wasClean);
+ NSParameterAssert(_state != kARDSignalingChannelStateError);
+ self.state = kARDSignalingChannelStateClosed;
+}
+
+#pragma mark - Private
+
+- (void)registerWithCollider {
+ if (_state == kARDSignalingChannelStateRegistered) {
+ return;
+ }
+ NSParameterAssert(_roomId.length);
+ NSParameterAssert(_clientId.length);
+ NSDictionary *registerMessage = @{
+ @"cmd": @"register",
+ @"roomid" : _roomId,
+ @"clientid" : _clientId,
+ };
+ NSData *message =
+ [NSJSONSerialization dataWithJSONObject:registerMessage
+ options:NSJSONWritingPrettyPrinted
+ error:nil];
+ NSString *messageString =
+ [[NSString alloc] initWithData:message encoding:NSUTF8StringEncoding];
+ RTCLog(@"Registering on WSS for rid:%@ cid:%@", _roomId, _clientId);
+ // Registration can fail if server rejects it. For example, if the room is
+ // full.
+ [_socket send:messageString];
+ self.state = kARDSignalingChannelStateRegistered;
+}
+
+@end
+
+@interface ARDLoopbackWebSocketChannel () <ARDSignalingChannelDelegate>
+@end
+
+@implementation ARDLoopbackWebSocketChannel
+
+- (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL {
+ return [super initWithURL:url restURL:restURL delegate:self];
+}
+
+#pragma mark - ARDSignalingChannelDelegate
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didReceiveMessage:(ARDSignalingMessage *)message {
+ switch (message.type) {
+ case kARDSignalingMessageTypeOffer: {
+ // Change message to answer, send back to server.
+ ARDSessionDescriptionMessage *sdpMessage =
+ (ARDSessionDescriptionMessage *)message;
+ RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
+ NSString *dsc = description.sdp;
+ dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
+ withString:@"answer"];
+ RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
+ ARDSignalingMessage *answer =
+ [[ARDSessionDescriptionMessage alloc]
+ initWithDescription:answerDescription];
+ [self sendMessage:answer];
+ break;
+ }
+ case kARDSignalingMessageTypeAnswer:
+ // Should not receive answer in loopback scenario.
+ break;
+ case kARDSignalingMessageTypeCandidate:
+ case kARDSignalingMessageTypeCandidateRemoval:
+ // Send back to server.
+ [self sendMessage:message];
+ break;
+ case kARDSignalingMessageTypeBye:
+ // Nothing to do.
+ return;
+ }
+}
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didChangeState:(ARDSignalingChannelState)state {
+}
+
+@end
+
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h
new file mode 100644
index 0000000000..5fd823f2de
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/api/peerconnection/RTCIceCandidate.h"
+
+@interface RTC_OBJC_TYPE (RTCIceCandidate)
+(JSON)
+
+ + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary;
++ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
+ (NSDictionary *)dictionary;
++ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
+ withType:(NSString *)typeValue;
+- (NSData *)JSONData;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m
new file mode 100644
index 0000000000..99cefbff0b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate+JSON.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+static NSString const *kRTCICECandidateTypeKey = @"type";
+static NSString const *kRTCICECandidateTypeValue = @"candidate";
+static NSString const *kRTCICECandidateMidKey = @"id";
+static NSString const *kRTCICECandidateMLineIndexKey = @"label";
+static NSString const *kRTCICECandidateSdpKey = @"candidate";
+static NSString const *kRTCICECandidatesTypeKey = @"candidates";
+
+@implementation RTC_OBJC_TYPE (RTCIceCandidate)
+(JSON)
+
+ + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary {
+ NSString *mid = dictionary[kRTCICECandidateMidKey];
+ NSString *sdp = dictionary[kRTCICECandidateSdpKey];
+ NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
+ NSInteger mLineIndex = [num integerValue];
+ return [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp
+ sdpMLineIndex:mLineIndex
+ sdpMid:mid];
+}
+
++ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
+ withType:(NSString *)typeValue {
+ NSMutableArray *jsonCandidates =
+ [NSMutableArray arrayWithCapacity:candidates.count];
+ for (RTC_OBJC_TYPE(RTCIceCandidate) * candidate in candidates) {
+ NSDictionary *jsonCandidate = [candidate JSONDictionary];
+ [jsonCandidates addObject:jsonCandidate];
+ }
+ NSDictionary *json = @{
+ kRTCICECandidateTypeKey : typeValue,
+ kRTCICECandidatesTypeKey : jsonCandidates
+ };
+ NSError *error = nil;
+ NSData *data =
+ [NSJSONSerialization dataWithJSONObject:json
+ options:NSJSONWritingPrettyPrinted
+ error:&error];
+ if (error) {
+ RTCLogError(@"Error serializing JSON: %@", error);
+ return nil;
+ }
+ return data;
+}
+
++ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
+ (NSDictionary *)dictionary {
+ NSArray *jsonCandidates = dictionary[kRTCICECandidatesTypeKey];
+ NSMutableArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
+ [NSMutableArray arrayWithCapacity:jsonCandidates.count];
+ for (NSDictionary *jsonCandidate in jsonCandidates) {
+ RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+ [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate];
+ [candidates addObject:candidate];
+ }
+ return candidates;
+}
+
+- (NSData *)JSONData {
+ NSDictionary *json = @{
+ kRTCICECandidateTypeKey : kRTCICECandidateTypeValue,
+ kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
+ kRTCICECandidateMidKey : self.sdpMid,
+ kRTCICECandidateSdpKey : self.sdp
+ };
+ NSError *error = nil;
+ NSData *data =
+ [NSJSONSerialization dataWithJSONObject:json
+ options:NSJSONWritingPrettyPrinted
+ error:&error];
+ if (error) {
+ RTCLogError(@"Error serializing JSON: %@", error);
+ return nil;
+ }
+ return data;
+}
+
+- (NSDictionary *)JSONDictionary{
+ NSDictionary *json = @{
+ kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
+ kRTCICECandidateMidKey : self.sdpMid,
+ kRTCICECandidateSdpKey : self.sdp
+ };
+ return json;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h
new file mode 100644
index 0000000000..35f6af7583
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/api/peerconnection/RTCIceServer.h"
+
+@interface RTC_OBJC_TYPE (RTCIceServer)
+(JSON)
+
+ + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m
new file mode 100644
index 0000000000..b5272a2f64
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer+JSON.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceServer)
+(JSON)
+
+ + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary {
+ NSArray *turnUrls = dictionary[@"urls"];
+ NSString *username = dictionary[@"username"] ?: @"";
+ NSString *credential = dictionary[@"credential"] ?: @"";
+ return [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:turnUrls
+ username:username
+ credential:credential];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h
new file mode 100644
index 0000000000..74d03d11b9
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/api/peerconnection/RTCSessionDescription.h"
+
+@interface RTC_OBJC_TYPE (RTCSessionDescription)
+(JSON)
+
+ + (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary
+ : (NSDictionary *)dictionary;
+- (NSData *)JSONData;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m
new file mode 100644
index 0000000000..28268faa84
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription+JSON.h"
+
+static NSString const *kRTCSessionDescriptionTypeKey = @"type";
+static NSString const *kRTCSessionDescriptionSdpKey = @"sdp";
+
+@implementation RTC_OBJC_TYPE (RTCSessionDescription)
+(JSON)
+
+ + (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary
+ : (NSDictionary *)dictionary {
+ NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey];
+ RTCSdpType type = [[self class] typeForString:typeString];
+ NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey];
+ return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp];
+}
+
+- (NSData *)JSONData {
+ NSString *type = [[self class] stringForType:self.type];
+ NSDictionary *json = @{
+ kRTCSessionDescriptionTypeKey : type,
+ kRTCSessionDescriptionSdpKey : self.sdp
+ };
+ return [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h
new file mode 100644
index 0000000000..5f0d7dbef7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+@interface NSDictionary (ARDUtilites)
+
+// Creates a dictionary with the keys and values in the JSON object.
++ (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString;
++ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData;
+
+@end
+
+@interface NSURLConnection (ARDUtilities)
+
+// Issues an asynchronous request that calls back on main queue.
++ (void)sendAsyncRequest:(NSURLRequest *)request
+ completionHandler:
+ (void (^)(NSURLResponse *response, NSData *data, NSError *error))completionHandler;
+
+// Posts data to the specified URL.
++ (void)sendAsyncPostToURL:(NSURL *)url
+ withData:(NSData *)data
+ completionHandler:(void (^)(BOOL succeeded, NSData *data))completionHandler;
+
+@end
+
+NSInteger ARDGetCpuUsagePercentage(void);
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m
new file mode 100644
index 0000000000..e0674f5210
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDUtilities.h"
+
+#import <mach/mach.h>
+
+#import "sdk/objc/base/RTCLogging.h"
+
+@implementation NSDictionary (ARDUtilites)
+
++ (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString {
+ NSParameterAssert(jsonString.length > 0);
+ NSData *data = [jsonString dataUsingEncoding:NSUTF8StringEncoding];
+ NSError *error = nil;
+ NSDictionary *dict =
+ [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
+ if (error) {
+ RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
+ }
+ return dict;
+}
+
++ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData {
+ NSError *error = nil;
+ NSDictionary *dict =
+ [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&error];
+ if (error) {
+ RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
+ }
+ return dict;
+}
+
+@end
+
+@implementation NSURLConnection (ARDUtilities)
+
++ (void)sendAsyncRequest:(NSURLRequest *)request
+ completionHandler:(void (^)(NSURLResponse *response,
+ NSData *data,
+ NSError *error))completionHandler {
+ // Kick off an async request which will call back on main thread.
+ NSURLSession *session = [NSURLSession sharedSession];
+ [[session dataTaskWithRequest:request
+ completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
+ if (completionHandler) {
+ completionHandler(response, data, error);
+ }
+ }] resume];
+}
+
+// Posts data to the specified URL.
++ (void)sendAsyncPostToURL:(NSURL *)url
+ withData:(NSData *)data
+ completionHandler:(void (^)(BOOL succeeded,
+ NSData *data))completionHandler {
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"POST";
+ request.HTTPBody = data;
+ [[self class] sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response,
+ NSData *data,
+ NSError *error) {
+ if (error) {
+ RTCLogError(@"Error posting data: %@", error.localizedDescription);
+ if (completionHandler) {
+ completionHandler(NO, data);
+ }
+ return;
+ }
+ NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
+ if (httpResponse.statusCode != 200) {
+ NSString *serverResponse = data.length > 0 ?
+ [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] :
+ nil;
+ RTCLogError(@"Received bad response: %@", serverResponse);
+ if (completionHandler) {
+ completionHandler(NO, data);
+ }
+ return;
+ }
+ if (completionHandler) {
+ completionHandler(YES, data);
+ }
+ }];
+}
+
+@end
+
+NSInteger ARDGetCpuUsagePercentage(void) {
+ // Create an array of thread ports for the current task.
+ const task_t task = mach_task_self();
+ thread_act_array_t thread_array;
+ mach_msg_type_number_t thread_count;
+ if (task_threads(task, &thread_array, &thread_count) != KERN_SUCCESS) {
+ return -1;
+ }
+
+ // Sum cpu usage from all threads.
+ float cpu_usage_percentage = 0;
+ thread_basic_info_data_t thread_info_data = {};
+ mach_msg_type_number_t thread_info_count;
+ for (size_t i = 0; i < thread_count; ++i) {
+ thread_info_count = THREAD_BASIC_INFO_COUNT;
+ kern_return_t ret = thread_info(thread_array[i],
+ THREAD_BASIC_INFO,
+ (thread_info_t)&thread_info_data,
+ &thread_info_count);
+ if (ret == KERN_SUCCESS) {
+ cpu_usage_percentage +=
+ 100.f * (float)thread_info_data.cpu_usage / TH_USAGE_SCALE;
+ }
+ }
+
+ // Dealloc the created array.
+ vm_deallocate(task, (vm_address_t)thread_array,
+ sizeof(thread_act_t) * thread_count);
+ return lroundf(cpu_usage_percentage);
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h
new file mode 100644
index 0000000000..7eafff8ebc
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+// The main application class of the AppRTCMobile iOS app demonstrating
+// interoperability between the Objective C implementation of PeerConnection
+// and the appr.tc demo webapp.
+@interface ARDAppDelegate : NSObject <UIApplicationDelegate>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m
new file mode 100644
index 0000000000..51e9910b87
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppDelegate.h"
+
+#import "sdk/objc/api/peerconnection/RTCFieldTrials.h"
+#import "sdk/objc/api/peerconnection/RTCSSLAdapter.h"
+#import "sdk/objc/api/peerconnection/RTCTracing.h"
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDMainViewController.h"
+
+@implementation ARDAppDelegate {
+ UIWindow *_window;
+}
+
+#pragma mark - UIApplicationDelegate methods
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ NSDictionary *fieldTrials = @{};
+ RTCInitFieldTrialDictionary(fieldTrials);
+ RTCInitializeSSL();
+ RTCSetupInternalTracer();
+ _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
+ [_window makeKeyAndVisible];
+ ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
+
+ UINavigationController *root =
+ [[UINavigationController alloc] initWithRootViewController:viewController];
+ root.navigationBar.translucent = NO;
+ _window.rootViewController = root;
+
+#if defined(NDEBUG)
+ // In debug builds the default level is LS_INFO and in non-debug builds it is
+ // disabled. Continue to log to console in non-debug builds, but only
+ // warnings and errors.
+ RTCSetMinDebugLogLevel(RTCLoggingSeverityWarning);
+#endif
+
+ return YES;
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+ RTCShutdownInternalTracer();
+ RTCCleanupSSL();
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h
new file mode 100644
index 0000000000..82f8fcdd1b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
+
+/**
+ * Controls a file capturer.
+ */
+NS_CLASS_AVAILABLE_IOS(10)
+@interface ARDFileCaptureController : NSObject
+
+/**
+ * Creates instance of the controller.
+ *
+ * @param capturer The capturer to be controlled.
+ */
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer;
+
+/**
+ * Starts the file capturer.
+ *
+ * Possible errors produced by the capturer will be logged.
+ */
+- (void)startCapture;
+
+/**
+ * Immediately stops capturer.
+ */
+- (void)stopCapture;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m
new file mode 100644
index 0000000000..2ddde6dd59
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDFileCaptureController.h"
+
+#import "sdk/objc/components/capturer/RTCFileVideoCapturer.h"
+
+@interface ARDFileCaptureController ()
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * fileCapturer;
+
+@end
+
+@implementation ARDFileCaptureController
+@synthesize fileCapturer = _fileCapturer;
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
+ if (self = [super init]) {
+ _fileCapturer = capturer;
+ }
+ return self;
+}
+
+- (void)startCapture {
+ [self startFileCapture];
+}
+
+- (void)startFileCapture {
+ [self.fileCapturer startCapturingFromFileNamed:@"foreman.mp4"
+ onError:^(NSError *_Nonnull error) {
+ NSLog(@"Error %@", error.userInfo);
+ }];
+}
+
+- (void)stopCapture {
+ [self.fileCapturer stopCapture];
+}
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h
new file mode 100644
index 0000000000..c6691c2d84
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@class ARDMainView;
+
+@protocol ARDMainViewDelegate <NSObject>
+
+- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback;
+- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView;
+
+@end
+
+// The main view of AppRTCMobile. It contains an input field for entering a room
+// name on apprtc to connect to.
+@interface ARDMainView : UIView
+
+@property(nonatomic, weak) id<ARDMainViewDelegate> delegate;
+// Updates the audio loop button as needed.
+@property(nonatomic, assign) BOOL isAudioLoopPlaying;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m
new file mode 100644
index 0000000000..d9521060eb
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMainView.h"
+
+#import "UIImage+ARDUtilities.h"
+
+static CGFloat const kRoomTextFieldHeight = 40;  // Fixed height of the room-name field, in points.
+static CGFloat const kRoomTextFieldMargin = 8;   // Horizontal inset/margin around the field.
+static CGFloat const kCallControlMargin = 8;     // Spacing between the call-control buttons.
+
+// Helper view that contains a text field and a clear button.
+@interface ARDRoomTextField : UIView <UITextFieldDelegate>
+@property(nonatomic, readonly) NSString *roomText;  // Current contents of the text field.
+@end
+
+@implementation ARDRoomTextField {
+  UITextField *_roomText;
+}
+
+- (instancetype)initWithFrame:(CGRect)frame {  // Builds the embedded text field and styles the container border.
+  if (self = [super initWithFrame:frame]) {
+    _roomText = [[UITextField alloc] initWithFrame:CGRectZero];
+    _roomText.borderStyle = UITextBorderStyleNone;
+    _roomText.font = [UIFont systemFontOfSize:12];
+    _roomText.placeholder = @"Room name";
+    _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
+    _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
+    _roomText.clearButtonMode = UITextFieldViewModeAlways;
+    _roomText.delegate = self;
+    [self addSubview:_roomText];
+
+    // Give rounded corners and a light gray border.
+    self.layer.borderWidth = 1;
+    self.layer.borderColor = [[UIColor lightGrayColor] CGColor];
+    self.layer.cornerRadius = 2;
+  }
+  return self;
+}
+
+- (void)layoutSubviews {  // Pins the text field inside the container with the standard margin.
+  _roomText.frame =
+      CGRectMake(kRoomTextFieldMargin, 0, CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
+                 kRoomTextFieldHeight);
+}
+
+- (CGSize)sizeThatFits:(CGSize)size {  // Width is caller-driven; height is fixed.
+  size.height = kRoomTextFieldHeight;
+  return size;
+}
+
+- (NSString *)roomText {
+  return _roomText.text;
+}
+
+#pragma mark - UITextFieldDelegate
+
+- (BOOL)textFieldShouldReturn:(UITextField *)textField {
+  // There is no other control that can take focus, so manually resign focus
+  // when return (Join) is pressed to trigger `textFieldDidEndEditing`.
+  [textField resignFirstResponder];
+  return YES;
+}
+
+@end
+
+@implementation ARDMainView {
+  ARDRoomTextField *_roomText;
+  UIButton *_startRegularCallButton;
+  UIButton *_startLoopbackCallButton;
+  UIButton *_audioLoopButton;
+}
+
+@synthesize delegate = _delegate;
+@synthesize isAudioLoopPlaying = _isAudioLoopPlaying;
+
+- (instancetype)initWithFrame:(CGRect)frame {  // Builds the room field and the three call-control buttons.
+  if (self = [super initWithFrame:frame]) {
+    _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
+    [self addSubview:_roomText];
+
+    UIFont *controlFont = [UIFont boldSystemFontOfSize:18.0];
+    UIColor *controlFontColor = [UIColor whiteColor];
+
+    _startRegularCallButton = [UIButton buttonWithType:UIButtonTypeSystem];  // Green "Call room" button.
+    _startRegularCallButton.titleLabel.font = controlFont;
+    [_startRegularCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+    _startRegularCallButton.backgroundColor
+        = [UIColor colorWithRed:66.0/255.0 green:200.0/255.0 blue:90.0/255.0 alpha:1.0];
+    [_startRegularCallButton setTitle:@"Call room" forState:UIControlStateNormal];
+    [_startRegularCallButton addTarget:self
+                                action:@selector(onStartRegularCall:)
+                      forControlEvents:UIControlEventTouchUpInside];
+    [self addSubview:_startRegularCallButton];
+
+    _startLoopbackCallButton = [UIButton buttonWithType:UIButtonTypeSystem];  // Blue "Loopback call" button.
+    _startLoopbackCallButton.titleLabel.font = controlFont;
+    [_startLoopbackCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+    _startLoopbackCallButton.backgroundColor =
+        [UIColor colorWithRed:0.0 green:122.0/255.0 blue:1.0 alpha:1.0];
+    [_startLoopbackCallButton setTitle:@"Loopback call" forState:UIControlStateNormal];
+    [_startLoopbackCallButton addTarget:self
+                                 action:@selector(onStartLoopbackCall:)
+                       forControlEvents:UIControlEventTouchUpInside];
+    [self addSubview:_startLoopbackCallButton];
+
+
+    // Used to test what happens to sounds when calls are in progress.
+    _audioLoopButton = [UIButton buttonWithType:UIButtonTypeSystem];
+    _audioLoopButton.titleLabel.font = controlFont;
+    [_audioLoopButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+    _audioLoopButton.backgroundColor =
+        [UIColor colorWithRed:1.0 green:149.0/255.0 blue:0.0 alpha:1.0];
+    [self updateAudioLoopButton];  // Sets the initial title from `_isAudioLoopPlaying`.
+    [_audioLoopButton addTarget:self
+                         action:@selector(onToggleAudioLoop:)
+               forControlEvents:UIControlEventTouchUpInside];
+    [self addSubview:_audioLoopButton];
+
+    self.backgroundColor = [UIColor whiteColor];
+  }
+  return self;
+}
+
+- (void)setIsAudioLoopPlaying:(BOOL)isAudioLoopPlaying {  // Keeps the audio-loop button title in sync with playback state.
+  if (_isAudioLoopPlaying == isAudioLoopPlaying) {
+    return;
+  }
+  _isAudioLoopPlaying = isAudioLoopPlaying;
+  [self updateAudioLoopButton];
+}
+
+- (void)layoutSubviews {  // Room field on top, then three equal-height buttons stacked vertically.
+  CGRect bounds = self.bounds;
+  CGFloat roomTextWidth = bounds.size.width - 2 * kRoomTextFieldMargin;
+  CGFloat roomTextHeight = [_roomText sizeThatFits:bounds.size].height;
+  _roomText.frame =
+      CGRectMake(kRoomTextFieldMargin, kRoomTextFieldMargin, roomTextWidth,
+                 roomTextHeight);
+
+  CGFloat buttonHeight =
+      (CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) - kCallControlMargin * 4) / 3;
+
+  CGFloat regularCallFrameTop = CGRectGetMaxY(_roomText.frame) + kCallControlMargin;
+  CGRect regularCallFrame = CGRectMake(kCallControlMargin,
+                                       regularCallFrameTop,
+                                       bounds.size.width - 2*kCallControlMargin,
+                                       buttonHeight);
+
+  CGFloat loopbackCallFrameTop = CGRectGetMaxY(regularCallFrame) + kCallControlMargin;
+  CGRect loopbackCallFrame = CGRectMake(kCallControlMargin,
+                                        loopbackCallFrameTop,
+                                        bounds.size.width - 2*kCallControlMargin,
+                                        buttonHeight);
+
+  CGFloat audioLoopTop = CGRectGetMaxY(loopbackCallFrame) + kCallControlMargin;
+  CGRect audioLoopFrame = CGRectMake(kCallControlMargin,
+                                     audioLoopTop,
+                                     bounds.size.width - 2*kCallControlMargin,
+                                     buttonHeight);
+
+  _startRegularCallButton.frame = regularCallFrame;
+  _startLoopbackCallButton.frame = loopbackCallFrame;
+  _audioLoopButton.frame = audioLoopFrame;
+}
+
+#pragma mark - Private
+
+- (void)updateAudioLoopButton {  // Title reflects whether the test sound is currently playing.
+  if (_isAudioLoopPlaying) {
+    [_audioLoopButton setTitle:@"Stop sound" forState:UIControlStateNormal];
+  } else {
+    [_audioLoopButton setTitle:@"Play sound" forState:UIControlStateNormal];
+  }
+}
+
+- (void)onToggleAudioLoop:(id)sender {  // Forwards button taps to the delegate.
+  [_delegate mainViewDidToggleAudioLoop:self];
+}
+
+- (void)onStartRegularCall:(id)sender {
+  [_delegate mainView:self didInputRoom:_roomText.roomText isLoopback:NO];
+}
+
+- (void)onStartLoopbackCall:(id)sender {
+  [_delegate mainView:self didInputRoom:_roomText.roomText isLoopback:YES];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h
new file mode 100644
index 0000000000..e5c92dd304
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface ARDMainViewController : UIViewController  // Root screen: hosts ARDMainView and launches calls/settings.
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
new file mode 100644
index 0000000000..e8b8112e41
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
@@ -0,0 +1,263 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMainViewController.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "sdk/objc/base/RTCLogging.h"
+#import "sdk/objc/components/audio/RTCAudioSession.h"
+#import "sdk/objc/components/audio/RTCAudioSessionConfiguration.h"
+#import "sdk/objc/helpers/RTCDispatcher.h"
+
+#import "ARDAppClient.h"
+#import "ARDMainView.h"
+#import "ARDSettingsModel.h"
+#import "ARDSettingsViewController.h"
+#import "ARDVideoCallViewController.h"
+
+static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";  // Settings gear icon asset name.
+
+// Launch argument to be passed to indicate that the app should start loopback immediately
+static NSString *const loopbackLaunchProcessArgument = @"loopback";
+
+@interface ARDMainViewController () <ARDMainViewDelegate,
+                                     ARDVideoCallViewControllerDelegate,
+                                     RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+@property(nonatomic, strong) ARDMainView *mainView;
+@property(nonatomic, strong) AVAudioPlayer *audioPlayer;  // Plays the looping test sound ("mozart.mp3").
+@end
+
+@implementation ARDMainViewController {
+  BOOL _useManualAudio;
+}
+
+@synthesize mainView = _mainView;
+@synthesize audioPlayer = _audioPlayer;
+
+- (void)viewDidLoad {  // Auto-starts a loopback call when launched with the "loopback" process argument (used by tests).
+  [super viewDidLoad];
+  if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
+    [self mainView:nil didInputRoom:@"" isLoopback:YES];
+  }
+}
+
+- (void)loadView {  // Installs ARDMainView as the root view and configures the shared WebRTC audio session.
+  self.title = @"AppRTC Mobile";
+  _mainView = [[ARDMainView alloc] initWithFrame:CGRectZero];
+  _mainView.delegate = self;
+  self.view = _mainView;
+  [self addSettingsBarButton];
+
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+  webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
+      AVAudioSessionCategoryOptionDefaultToSpeaker;  // Route call audio to the speaker by default.
+  [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig];
+
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  [session addDelegate:self];
+
+  [self configureAudioSession];
+  [self setupAudioPlayer];
+}
+
+- (void)addSettingsBarButton {  // Adds the gear button that opens the settings screen.
+  UIBarButtonItem *settingsButton =
+      [[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:barButtonImageString]
+                                       style:UIBarButtonItemStylePlain
+                                      target:self
+                                      action:@selector(showSettings:)];
+  self.navigationItem.rightBarButtonItem = settingsButton;
+}
+
++ (NSString *)loopbackRoomString {  // Random room name (UUID with dashes stripped) for loopback calls.
+  NSString *loopbackRoomString =
+      [[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-" withString:@""];
+  return loopbackRoomString;
+}
+
+#pragma mark - ARDMainViewDelegate
+
+- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback {  // Validates the room name and presents the video call screen.
+  if (!room.length) {
+    if (isLoopback) {
+      // If this is a loopback call, allow a generated room name.
+      room = [[self class] loopbackRoomString];
+    } else {
+      [self showAlertWithMessage:@"Missing room name."];
+      return;
+    }
+  }
+  // Trim whitespaces.
+  NSCharacterSet *whitespaceSet = [NSCharacterSet whitespaceCharacterSet];
+  NSString *trimmedRoom = [room stringByTrimmingCharactersInSet:whitespaceSet];
+
+  // Check that room name is valid.
+  NSError *error = nil;
+  NSRegularExpressionOptions options = NSRegularExpressionCaseInsensitive;
+  NSRegularExpression *regex =
+      [NSRegularExpression regularExpressionWithPattern:@"\\w+"
+                                                options:options
+                                                  error:&error];
+  if (error) {
+    [self showAlertWithMessage:error.localizedDescription];
+    return;
+  }
+  NSRange matchRange =
+      [regex rangeOfFirstMatchInString:trimmedRoom
+                               options:0
+                                 range:NSMakeRange(0, trimmedRoom.length)];
+  if (matchRange.location == NSNotFound ||
+      matchRange.length != trimmedRoom.length) {  // Entire name must be word characters.
+    [self showAlertWithMessage:@"Invalid room name."];
+    return;
+  }
+
+  ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
+  session.isAudioEnabled = NO;  // Audio is enabled once WebRTC starts play/record (see delegate below).
+
+  // Kick off the video call.
+  ARDVideoCallViewController *videoCallViewController =
+      [[ARDVideoCallViewController alloc] initForRoom:trimmedRoom
+                                           isLoopback:isLoopback
+                                             delegate:self];
+  videoCallViewController.modalTransitionStyle =
+      UIModalTransitionStyleCrossDissolve;
+  videoCallViewController.modalPresentationStyle = UIModalPresentationFullScreen;
+  [self presentViewController:videoCallViewController
+                     animated:YES
+                   completion:nil];
+}
+
+- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView {  // Toggles the test sound and mirrors actual player state back to the view.
+  if (mainView.isAudioLoopPlaying) {
+    [_audioPlayer stop];
+  } else {
+    [_audioPlayer play];
+  }
+  mainView.isAudioLoopPlaying = _audioPlayer.playing;
+}
+
+#pragma mark - ARDVideoCallViewControllerDelegate
+
+- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController {  // Dismisses the call screen and restores ambient audio.
+  if (![viewController isBeingDismissed]) {
+    RTCLog(@"Dismissing VC");
+    [self dismissViewControllerAnimated:YES completion:^{
+      [self restartAudioPlayerIfNeeded];
+    }];
+  }
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  session.isAudioEnabled = NO;
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
+
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+  // Stop playback on main queue and then configure WebRTC.
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeMain
+                    block:^{
+                      if (self.mainView.isAudioLoopPlaying) {
+                        RTCLog(@"Stopping audio loop due to WebRTC start.");
+                        [self.audioPlayer stop];
+                      }
+                      RTCLog(@"Setting isAudioEnabled to YES.");
+                      session.isAudioEnabled = YES;
+                    }];
+}
+
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+  // WebRTC is done with the audio session. Restart playback.
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain
+                                              block:^{
+                                                RTCLog(@"audioSessionDidStopPlayOrRecord");
+                                                [self restartAudioPlayerIfNeeded];
+                                              }];
+}
+
+#pragma mark - Private
+- (void)showSettings:(id)sender {  // Presents the settings screen in a modal navigation controller.
+  ARDSettingsViewController *settingsController =
+      [[ARDSettingsViewController alloc] initWithStyle:UITableViewStyleGrouped
+                                         settingsModel:[[ARDSettingsModel alloc] init]];
+
+  UINavigationController *navigationController =
+      [[UINavigationController alloc] initWithRootViewController:settingsController];
+  [self presentViewControllerAsModal:navigationController];
+}
+
+- (void)presentViewControllerAsModal:(UIViewController *)viewController {
+  [self presentViewController:viewController animated:YES completion:nil];
+}
+
+- (void)configureAudioSession {  // Puts the session in ambient/ducking mode for the test-sound player.
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *configuration =
+      [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
+  configuration.category = AVAudioSessionCategoryAmbient;
+  configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
+  configuration.mode = AVAudioSessionModeDefault;
+
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  [session lockForConfiguration];
+  BOOL hasSucceeded = NO;
+  NSError *error = nil;
+  if (session.isActive) {  // Only pass active:YES when the session isn't already active.
+    hasSucceeded = [session setConfiguration:configuration error:&error];
+  } else {
+    hasSucceeded = [session setConfiguration:configuration
+                                      active:YES
+                                       error:&error];
+  }
+  if (!hasSucceeded) {
+    RTCLogError(@"Error setting configuration: %@", error.localizedDescription);
+  }
+  [session unlockForConfiguration];
+}
+
+- (void)setupAudioPlayer {  // Loads the looping test sound from the app bundle.
+  NSString *audioFilePath =
+      [[NSBundle mainBundle] pathForResource:@"mozart" ofType:@"mp3"];
+  NSURL *audioFileURL = audioFilePath ? [NSURL fileURLWithPath:audioFilePath] : nil;  // fileURLWithPath: builds a proper file URL; URLWithString: mishandles plain paths (e.g. with spaces).
+  _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileURL
+                                                        error:nil];
+  _audioPlayer.numberOfLoops = -1;  // Loop forever.
+  _audioPlayer.volume = 1.0;
+  [_audioPlayer prepareToPlay];
+}
+
+- (void)restartAudioPlayerIfNeeded {  // Resumes the test sound after a call ends, if it was playing and no modal is up.
+  [self configureAudioSession];
+  if (_mainView.isAudioLoopPlaying && !self.presentedViewController) {
+    RTCLog(@"Starting audio loop due to WebRTC end.");
+    [_audioPlayer play];
+  }
+}
+
+- (void)showAlertWithMessage:(NSString*)message {  // Simple OK-only alert helper.
+  UIAlertController *alert =
+      [UIAlertController alertControllerWithTitle:nil
+                                          message:message
+                                   preferredStyle:UIAlertControllerStyleAlert];
+
+  UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
+                                                          style:UIAlertActionStyleDefault
+                                                        handler:^(UIAlertAction *action){
+                                                        }];
+
+  [alert addAction:defaultAction];
+  [self presentViewController:alert animated:YES completion:nil];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h
new file mode 100644
index 0000000000..759af5416f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@class ARDSettingsModel;
+
+NS_ASSUME_NONNULL_BEGIN
+/**
+ * Displays settings options.
+ */
+@interface ARDSettingsViewController : UITableViewController
+
+/**
+ * Creates new instance.
+ *
+ * @param style the table view style that should be used
+ * @param settingsModel model class for the user settings.
+ */
+- (instancetype)initWithStyle:(UITableViewStyle)style
+                settingsModel:(ARDSettingsModel *)settingsModel;
+
+#pragma mark - Unavailable
+
+- (instancetype)initWithStyle:(UITableViewStyle)style NS_UNAVAILABLE;  // A settings model is required; use the designated initializer above.
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m
new file mode 100644
index 0000000000..9bcbd3aa5c
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m
@@ -0,0 +1,361 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSettingsViewController.h"
+#import "ARDSettingsModel.h"
+#import "RTCVideoCodecInfo+HumanReadable.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(int, ARDSettingsSections) {  // Table sections, in display order.
+  ARDSettingsSectionAudioSettings = 0,
+  ARDSettingsSectionVideoResolution,
+  ARDSettingsSectionVideoCodec,
+  ARDSettingsSectionBitRate,
+};
+
+typedef NS_ENUM(int, ARDAudioSettingsOptions) {  // Rows within the audio-settings section.
+  ARDAudioSettingsAudioOnly = 0,
+  ARDAudioSettingsCreateAecDump,
+  ARDAudioSettingsUseManualAudioConfig,
+};
+
+@interface ARDSettingsViewController () <UITextFieldDelegate> {
+  ARDSettingsModel *_settingsModel;  // Backing store for all user settings.
+}
+
+@end
+
+@implementation ARDSettingsViewController
+
+- (instancetype)initWithStyle:(UITableViewStyle)style
+                settingsModel:(ARDSettingsModel *)settingsModel {  // Designated initializer; retains the settings model.
+  self = [super initWithStyle:style];
+  if (self) {
+    _settingsModel = settingsModel;
+  }
+  return self;
+}
+
+#pragma mark - View lifecycle
+
+- (void)viewDidLoad {
+  [super viewDidLoad];
+  self.title = @"Settings";
+  [self addDoneBarButton];
+}
+
+- (void)viewWillAppear:(BOOL)animated {
+  [super viewWillAppear:animated];
+}
+
+#pragma mark - Data source
+
+- (NSArray<NSString *> *)videoResolutionArray {  // Resolutions offered by the settings model.
+  return [_settingsModel availableVideoResolutions];
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)videoCodecArray {  // Codecs offered by the settings model.
+  return [_settingsModel availableVideoCodecs];
+}
+
+#pragma mark -
+
+- (void)addDoneBarButton {  // "Done" button dismisses this modal settings screen.
+  UIBarButtonItem *barItem =
+      [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone
+                                                    target:self
+                                                    action:@selector(dismissModally:)];
+  self.navigationItem.leftBarButtonItem = barItem;
+}
+
+#pragma mark - Dismissal of view controller
+
+- (void)dismissModally:(id)sender {
+  [self dismissViewControllerAnimated:YES completion:nil];
+}
+
+#pragma mark - Table view data source
+
+- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
+  return 4;  // Must match the ARDSettingsSections enum above.
+}
+
+- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
+  switch (section) {
+    case ARDSettingsSectionAudioSettings:
+      return 3;  // Must match the ARDAudioSettingsOptions enum above.
+    case ARDSettingsSectionVideoResolution:
+      return self.videoResolutionArray.count;
+    case ARDSettingsSectionVideoCodec:
+      return self.videoCodecArray.count;
+    default:
+      return 1;  // Bitrate section has a single text-field row.
+  }
+}
+
+#pragma mark - Table view delegate helpers
+
+- (void)removeAllAccessories:(UITableView *)tableView
+                   inSection:(int)section
+{  // Clears the checkmark from every row of the section.
+  for (int i = 0; i < [tableView numberOfRowsInSection:section]; i++) {
+    NSIndexPath *rowPath = [NSIndexPath indexPathForRow:i inSection:section];
+    UITableViewCell *cell = [tableView cellForRowAtIndexPath:rowPath];
+    cell.accessoryType = UITableViewCellAccessoryNone;
+  }
+}
+
+- (void)tableView:(UITableView *)tableView
+updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
+        inSection:(int)section {  // Moves the single checkmark to the tapped row.
+  [self removeAllAccessories:tableView inSection:section];
+  UITableViewCell *cell = [tableView cellForRowAtIndexPath:indexPath];
+  cell.accessoryType = UITableViewCellAccessoryCheckmark;
+  [tableView deselectRowAtIndexPath:indexPath animated:YES];
+}
+
+#pragma mark - Table view delegate
+
+- (nullable NSString *)tableView:(UITableView *)tableView
+    titleForHeaderInSection:(NSInteger)section {
+  switch (section) {
+    case ARDSettingsSectionAudioSettings:
+      return @"Audio";
+    case ARDSettingsSectionVideoResolution:
+      return @"Video resolution";
+    case ARDSettingsSectionVideoCodec:
+      return @"Video codec";
+    case ARDSettingsSectionBitRate:
+      return @"Maximum bitrate";
+    default:
+      return @"";
+  }
+}
+
+- (UITableViewCell *)tableView:(UITableView *)tableView
+         cellForRowAtIndexPath:(NSIndexPath *)indexPath {  // Dispatches to a per-section cell factory.
+  switch (indexPath.section) {
+    case ARDSettingsSectionAudioSettings:
+      return [self audioSettingsTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+    case ARDSettingsSectionVideoResolution:
+      return [self videoResolutionTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+    case ARDSettingsSectionVideoCodec:
+      return [self videoCodecTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+    case ARDSettingsSectionBitRate:
+      return [self bitrateTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+    default:
+      return [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+                                    reuseIdentifier:@"identifier"];
+  }
+}
+
+- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
+  switch (indexPath.section) {
+    case ARDSettingsSectionVideoResolution:
+      [self tableView:tableView disSelectVideoResolutionAtIndex:indexPath];  // (sic: "disSelect" is a historical typo in the selector name.)
+      break;
+
+    case ARDSettingsSectionVideoCodec:
+      [self tableView:tableView didSelectVideoCodecCellAtIndexPath:indexPath];
+      break;
+  }
+}
+
+#pragma mark - Table view delegate(Video Resolution)
+
+- (UITableViewCell *)videoResolutionTableViewCellForTableView:(UITableView *)tableView
+                                                  atIndexPath:(NSIndexPath *)indexPath {  // Checkmarks the resolution currently stored in settings.
+  NSString *dequeueIdentifier = @"ARDSettingsVideoResolutionViewCellIdentifier";
+  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  if (!cell) {
+    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+                                  reuseIdentifier:dequeueIdentifier];
+  }
+  NSString *resolution = self.videoResolutionArray[indexPath.row];
+  cell.textLabel.text = resolution;
+  if ([resolution isEqualToString:[_settingsModel currentVideoResolutionSettingFromStore]]) {
+    cell.accessoryType = UITableViewCellAccessoryCheckmark;
+  } else {
+    cell.accessoryType = UITableViewCellAccessoryNone;
+  }
+
+  return cell;
+}
+
+- (void)tableView:(UITableView *)tableView
+    disSelectVideoResolutionAtIndex:(NSIndexPath *)indexPath {  // Persists the tapped resolution and updates the checkmark.
+  [self tableView:tableView
+      updateListSelectionAtIndexPath:indexPath
+                           inSection:ARDSettingsSectionVideoResolution];
+
+  NSString *videoResolution = self.videoResolutionArray[indexPath.row];
+  [_settingsModel storeVideoResolutionSetting:videoResolution];
+}
+
+#pragma mark - Table view delegate(Video Codec)
+
+- (UITableViewCell *)videoCodecTableViewCellForTableView:(UITableView *)tableView
+                                             atIndexPath:(NSIndexPath *)indexPath {  // Checkmarks the codec currently stored in settings.
+  NSString *dequeueIdentifier = @"ARDSettingsVideoCodecCellIdentifier";
+  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  if (!cell) {
+    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+                                  reuseIdentifier:dequeueIdentifier];
+  }
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row];
+  cell.textLabel.text = [codec humanReadableDescription];
+  if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) {
+    cell.accessoryType = UITableViewCellAccessoryCheckmark;
+  } else {
+    cell.accessoryType = UITableViewCellAccessoryNone;
+  }
+
+  return cell;
+}
+
+- (void)tableView:(UITableView *)tableView
+    didSelectVideoCodecCellAtIndexPath:(NSIndexPath *)indexPath {  // Persists the tapped codec and updates the checkmark.
+  [self tableView:tableView
+      updateListSelectionAtIndexPath:indexPath
+                           inSection:ARDSettingsSectionVideoCodec];
+
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row];
+  [_settingsModel storeVideoCodecSetting:videoCodec];
+}
+
+#pragma mark - Table view delegate(Bitrate)
+
+- (UITableViewCell *)bitrateTableViewCellForTableView:(UITableView *)tableView
+                                          atIndexPath:(NSIndexPath *)indexPath {
+  NSString *dequeueIdentifier = @"ARDSettingsBitrateCellIdentifier";
+  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  if (!cell) {  // NOTE(review): the text field is only configured on first creation; a reused cell keeps its old text — confirm this section never recycles across values.
+    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+                                  reuseIdentifier:dequeueIdentifier];
+
+    UITextField *textField = [[UITextField alloc]
+        initWithFrame:CGRectMake(10, 0, cell.bounds.size.width - 20, cell.bounds.size.height)];
+    NSString *currentMaxBitrate = [_settingsModel currentMaxBitrateSettingFromStore].stringValue;
+    textField.text = currentMaxBitrate;
+    textField.placeholder = @"Enter max bit rate (kbps)";
+    textField.keyboardType = UIKeyboardTypeNumberPad;
+    textField.delegate = self;
+
+    // Numerical keyboards have no return button, we need to add one manually.
+    UIToolbar *numberToolbar =
+        [[UIToolbar alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
+    numberToolbar.items = @[
+      [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
+                                                    target:nil
+                                                    action:nil],
+      [[UIBarButtonItem alloc] initWithTitle:@"Apply"
+                                       style:UIBarButtonItemStyleDone
+                                      target:self
+                                      action:@selector(numberTextFieldDidEndEditing:)]
+    ];
+    [numberToolbar sizeToFit];
+
+    textField.inputAccessoryView = numberToolbar;
+    [cell addSubview:textField];  // NOTE(review): UIKit convention is cell.contentView — confirm layout is unaffected.
+  }
+  return cell;
+}
+
+- (void)numberTextFieldDidEndEditing:(id)sender {  // "Apply" toolbar button: dismisses the keyboard, which triggers textFieldDidEndEditing:.
+  [self.view endEditing:YES];
+}
+
+- (void)textFieldDidEndEditing:(UITextField *)textField {  // Persists the entered bitrate; empty input clears the stored value.
+  NSNumber *bitrateNumber = nil;
+
+  if (textField.text.length != 0) {
+    bitrateNumber = [NSNumber numberWithInteger:textField.text.intValue];
+  }
+
+  [_settingsModel storeMaxBitrateSetting:bitrateNumber];
+}
+
+#pragma mark - Table view delegate(Audio settings)
+
+- (UITableViewCell *)audioSettingsTableViewCellForTableView:(UITableView *)tableView
+                                                atIndexPath:(NSIndexPath *)indexPath {  // Switch-accessory cell; switch tag encodes the setting row.
+  NSString *dequeueIdentifier = @"ARDSettingsAudioSettingsCellIdentifier";
+  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  if (!cell) {
+    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+                                  reuseIdentifier:dequeueIdentifier];
+    cell.selectionStyle = UITableViewCellSelectionStyleNone;
+    UISwitch *switchView = [[UISwitch alloc] initWithFrame:CGRectZero];
+    switchView.tag = indexPath.row;
+    [switchView addTarget:self
+                   action:@selector(audioSettingSwitchChanged:)
+         forControlEvents:UIControlEventValueChanged];
+    cell.accessoryView = switchView;
+  }
+
+  cell.textLabel.text = [self labelForAudioSettingAtIndexPathRow:indexPath.row];
+  UISwitch *switchView = (UISwitch *)cell.accessoryView;
+  switchView.on = [self valueForAudioSettingAtIndexPathRow:indexPath.row];
+
+  return cell;
+}
+
+- (NSString *)labelForAudioSettingAtIndexPathRow:(NSInteger)setting {  // Display label for each audio-settings row.
+  switch (setting) {
+    case ARDAudioSettingsAudioOnly:
+      return @"Audio only";
+    case ARDAudioSettingsCreateAecDump:
+      return @"Create AecDump";
+    case ARDAudioSettingsUseManualAudioConfig:
+      return @"Use manual audio config";
+    default:
+      return @"";
+  }
+}
+
+- (BOOL)valueForAudioSettingAtIndexPathRow:(NSInteger)setting {  // Current stored value for each audio-settings row.
+  switch (setting) {
+    case ARDAudioSettingsAudioOnly:
+      return [_settingsModel currentAudioOnlySettingFromStore];
+    case ARDAudioSettingsCreateAecDump:
+      return [_settingsModel currentCreateAecDumpSettingFromStore];
+    case ARDAudioSettingsUseManualAudioConfig:
+      return [_settingsModel currentUseManualAudioConfigSettingFromStore];
+    default:
+      return NO;
+  }
+}
+
+- (void)audioSettingSwitchChanged:(UISwitch *)sender {  // Persists a toggled switch; sender.tag identifies the row.
+  switch (sender.tag) {
+    case ARDAudioSettingsAudioOnly: {
+      [_settingsModel storeAudioOnlySetting:sender.isOn];
+      break;
+    }
+    case ARDAudioSettingsCreateAecDump: {
+      [_settingsModel storeCreateAecDumpSetting:sender.isOn];
+      break;
+    }
+    case ARDAudioSettingsUseManualAudioConfig: {
+      [_settingsModel storeUseManualAudioConfigSetting:sender.isOn];
+      break;
+    }
+    default:
+      break;
+  }
+}
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h
new file mode 100644
index 0000000000..72207de64e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCStatisticsReport);
+
+@interface ARDStatsView : UIView
+
+- (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;  // Renders the given WebRTC stats report into the on-screen label.
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m
new file mode 100644
index 0000000000..867ba5b09e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDStatsView.h"
+
+#import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h"
+
+#import "ARDStatsBuilder.h"
+
+@implementation ARDStatsView {
+ UILabel *_statsLabel;
+ ARDStatsBuilder *_statsBuilder;
+}
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ _statsLabel = [[UILabel alloc] initWithFrame:CGRectZero];
+ _statsLabel.numberOfLines = 0;
+ _statsLabel.font = [UIFont fontWithName:@"Roboto" size:12];
+ _statsLabel.adjustsFontSizeToFitWidth = YES;
+ _statsLabel.minimumScaleFactor = 0.6;
+ _statsLabel.textColor = [UIColor greenColor];
+ [self addSubview:_statsLabel];
+ self.backgroundColor = [UIColor colorWithWhite:0 alpha:.6];
+ _statsBuilder = [[ARDStatsBuilder alloc] init];
+ }
+ return self;
+}
+
+- (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+ _statsBuilder.stats = stats;
+ _statsLabel.text = _statsBuilder.statsString;
+}
+
+- (void)layoutSubviews {
+ _statsLabel.frame = self.bounds;
+}
+
+- (CGSize)sizeThatFits:(CGSize)size {
+ return [_statsLabel sizeThatFits:size];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h
new file mode 100644
index 0000000000..a31c7fe742
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#import "sdk/objc/base/RTCVideoRenderer.h"
+#import "sdk/objc/helpers/RTCCameraPreviewView.h"
+
+#import "ARDStatsView.h"
+
+@class ARDVideoCallView;
+@protocol ARDVideoCallViewDelegate <NSObject>
+
+// Called when the camera switch button is pressed.
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldSwitchCameraWithCompletion:(void (^)(NSError *))completion;
+
+// Called when the route change button is pressed.
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldChangeRouteWithCompletion:(void (^)(void))completion;
+
+// Called when the hangup button is pressed.
+- (void)videoCallViewDidHangup:(ARDVideoCallView *)view;
+
+// Called when stats are enabled by triple tapping.
+- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view;
+
+@end
+
+// Video call view that shows local and remote video, a stats overlay, a status
+// label, and buttons for hangup, camera switch, and audio route changes.
+@interface ARDVideoCallView : UIView
+
+@property(nonatomic, readonly) UILabel *statusLabel;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic, readonly) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
+@property(nonatomic, readonly) ARDStatsView *statsView;
+@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
new file mode 100644
index 0000000000..437aea8d56
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDVideoCallView.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "sdk/objc/components/renderer/metal/RTCMTLVideoView.h"
+
+#import "UIImage+ARDUtilities.h"
+
+static CGFloat const kButtonPadding = 16;
+static CGFloat const kButtonSize = 48;
+static CGFloat const kLocalVideoViewSize = 120;
+static CGFloat const kLocalVideoViewPadding = 8;
+static CGFloat const kStatusBarHeight = 20;
+
+@interface ARDVideoCallView () <RTC_OBJC_TYPE (RTCVideoViewDelegate)>
+@end
+
+@implementation ARDVideoCallView {
+ UIButton *_routeChangeButton;
+ UIButton *_cameraSwitchButton;
+ UIButton *_hangupButton;
+ CGSize _remoteVideoSize;
+}
+
+@synthesize statusLabel = _statusLabel;
+@synthesize localVideoView = _localVideoView;
+@synthesize remoteVideoView = _remoteVideoView;
+@synthesize statsView = _statsView;
+@synthesize delegate = _delegate;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+
+ _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+
+ [self addSubview:_remoteVideoView];
+
+ _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
+ [self addSubview:_localVideoView];
+
+ _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
+ _statsView.hidden = YES;
+ [self addSubview:_statsView];
+
+ _routeChangeButton = [UIButton buttonWithType:UIButtonTypeCustom];
+ _routeChangeButton.backgroundColor = [UIColor grayColor];
+ _routeChangeButton.layer.cornerRadius = kButtonSize / 2;
+ _routeChangeButton.layer.masksToBounds = YES;
+ UIImage *image = [UIImage imageForName:@"ic_surround_sound_black_24dp.png"
+ color:[UIColor whiteColor]];
+ [_routeChangeButton setImage:image forState:UIControlStateNormal];
+ [_routeChangeButton addTarget:self
+ action:@selector(onRouteChange:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_routeChangeButton];
+
+ // TODO(tkchin): don't display this if we can't actually do camera switch.
+ _cameraSwitchButton = [UIButton buttonWithType:UIButtonTypeCustom];
+ _cameraSwitchButton.backgroundColor = [UIColor grayColor];
+ _cameraSwitchButton.layer.cornerRadius = kButtonSize / 2;
+ _cameraSwitchButton.layer.masksToBounds = YES;
+ image = [UIImage imageForName:@"ic_switch_video_black_24dp.png" color:[UIColor whiteColor]];
+ [_cameraSwitchButton setImage:image forState:UIControlStateNormal];
+ [_cameraSwitchButton addTarget:self
+ action:@selector(onCameraSwitch:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_cameraSwitchButton];
+
+ _hangupButton = [UIButton buttonWithType:UIButtonTypeCustom];
+ _hangupButton.backgroundColor = [UIColor redColor];
+ _hangupButton.layer.cornerRadius = kButtonSize / 2;
+ _hangupButton.layer.masksToBounds = YES;
+ image = [UIImage imageForName:@"ic_call_end_black_24dp.png"
+ color:[UIColor whiteColor]];
+ [_hangupButton setImage:image forState:UIControlStateNormal];
+ [_hangupButton addTarget:self
+ action:@selector(onHangup:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_hangupButton];
+
+ _statusLabel = [[UILabel alloc] initWithFrame:CGRectZero];
+ _statusLabel.font = [UIFont fontWithName:@"Roboto" size:16];
+ _statusLabel.textColor = [UIColor whiteColor];
+ [self addSubview:_statusLabel];
+
+ UITapGestureRecognizer *tapRecognizer =
+ [[UITapGestureRecognizer alloc]
+ initWithTarget:self
+ action:@selector(didTripleTap:)];
+ tapRecognizer.numberOfTapsRequired = 3;
+ [self addGestureRecognizer:tapRecognizer];
+ }
+ return self;
+}
+
+- (void)layoutSubviews {
+ CGRect bounds = self.bounds;
+ if (_remoteVideoSize.width > 0 && _remoteVideoSize.height > 0) {
+ // Aspect fill remote video into bounds.
+ CGRect remoteVideoFrame =
+ AVMakeRectWithAspectRatioInsideRect(_remoteVideoSize, bounds);
+ CGFloat scale = 1;
+ if (remoteVideoFrame.size.width > remoteVideoFrame.size.height) {
+ // Scale by height.
+ scale = bounds.size.height / remoteVideoFrame.size.height;
+ } else {
+ // Scale by width.
+ scale = bounds.size.width / remoteVideoFrame.size.width;
+ }
+ remoteVideoFrame.size.height *= scale;
+ remoteVideoFrame.size.width *= scale;
+ _remoteVideoView.frame = remoteVideoFrame;
+ _remoteVideoView.center =
+ CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
+ } else {
+ _remoteVideoView.frame = bounds;
+ }
+
+ // Aspect fit local video view into a square box.
+ CGRect localVideoFrame =
+ CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+ // Place the view in the bottom right.
+ localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+ - localVideoFrame.size.width - kLocalVideoViewPadding;
+ localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+ - localVideoFrame.size.height - kLocalVideoViewPadding;
+ _localVideoView.frame = localVideoFrame;
+
+ // Place stats at the top.
+ CGSize statsSize = [_statsView sizeThatFits:bounds.size];
+ _statsView.frame = CGRectMake(CGRectGetMinX(bounds),
+ CGRectGetMinY(bounds) + kStatusBarHeight,
+ statsSize.width, statsSize.height);
+
+ // Place hangup button in the bottom left.
+ _hangupButton.frame =
+ CGRectMake(CGRectGetMinX(bounds) + kButtonPadding,
+ CGRectGetMaxY(bounds) - kButtonPadding -
+ kButtonSize,
+ kButtonSize,
+ kButtonSize);
+
+ // Place button to the right of hangup button.
+ CGRect cameraSwitchFrame = _hangupButton.frame;
+ cameraSwitchFrame.origin.x =
+ CGRectGetMaxX(cameraSwitchFrame) + kButtonPadding;
+ _cameraSwitchButton.frame = cameraSwitchFrame;
+
+ // Place route button to the right of camera button.
+ CGRect routeChangeFrame = _cameraSwitchButton.frame;
+ routeChangeFrame.origin.x =
+ CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
+ _routeChangeButton.frame = routeChangeFrame;
+
+ [_statusLabel sizeToFit];
+ _statusLabel.center =
+ CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate)
+
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
+ if (videoView == _remoteVideoView) {
+ _remoteVideoSize = size;
+ }
+ [self setNeedsLayout];
+}
+
+#pragma mark - Private
+
+- (void)onCameraSwitch:(UIButton *)sender {
+ sender.enabled = false;
+ [_delegate videoCallView:self
+ shouldSwitchCameraWithCompletion:^(NSError *error) {
+ dispatch_async(dispatch_get_main_queue(), ^(void) {
+ sender.enabled = true;
+ });
+ }];
+}
+
+- (void)onRouteChange:(UIButton *)sender {
+ sender.enabled = false;
+ __weak ARDVideoCallView *weakSelf = self;
+ [_delegate videoCallView:self
+ shouldChangeRouteWithCompletion:^(void) {
+ ARDVideoCallView *strongSelf = weakSelf;
+ if (strongSelf) {
+ dispatch_async(dispatch_get_main_queue(), ^(void) {
+ sender.enabled = true;
+ });
+ }
+ }];
+}
+
+- (void)onHangup:(id)sender {
+ [_delegate videoCallViewDidHangup:self];
+}
+
+- (void)didTripleTap:(UITapGestureRecognizer *)recognizer {
+ [_delegate videoCallViewDidEnableStats:self];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h
new file mode 100644
index 0000000000..bdb8747524
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@class ARDVideoCallViewController;
+@protocol ARDVideoCallViewControllerDelegate <NSObject>
+
+- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController;
+
+@end
+
+@interface ARDVideoCallViewController : UIViewController
+
+@property(nonatomic, weak) id<ARDVideoCallViewControllerDelegate> delegate;
+
+- (instancetype)initForRoom:(NSString *)room
+ isLoopback:(BOOL)isLoopback
+ delegate:(id<ARDVideoCallViewControllerDelegate>)delegate;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
new file mode 100644
index 0000000000..a82d90b290
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDVideoCallViewController.h"
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/base/RTCLogging.h"
+#import "sdk/objc/components/audio/RTCAudioSession.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/helpers/RTCDispatcher.h"
+
+#import "ARDAppClient.h"
+#import "ARDCaptureController.h"
+#import "ARDFileCaptureController.h"
+#import "ARDSettingsModel.h"
+#import "ARDVideoCallView.h"
+
+@interface ARDVideoCallViewController () <ARDAppClientDelegate,
+ ARDVideoCallViewDelegate,
+ RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack;
+@property(nonatomic, readonly) ARDVideoCallView *videoCallView;
+@property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
+@end
+
+@implementation ARDVideoCallViewController {
+ ARDAppClient *_client;
+ RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
+ ARDCaptureController *_captureController;
+ ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10);
+}
+
+@synthesize videoCallView = _videoCallView;
+@synthesize remoteVideoTrack = _remoteVideoTrack;
+@synthesize delegate = _delegate;
+@synthesize portOverride = _portOverride;
+
+- (instancetype)initForRoom:(NSString *)room
+ isLoopback:(BOOL)isLoopback
+ delegate:(id<ARDVideoCallViewControllerDelegate>)delegate {
+ if (self = [super init]) {
+ ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+ _delegate = delegate;
+
+ _client = [[ARDAppClient alloc] initWithDelegate:self];
+ [_client connectToRoomWithId:room settings:settingsModel isLoopback:isLoopback];
+ }
+ return self;
+}
+
+- (void)loadView {
+ _videoCallView = [[ARDVideoCallView alloc] initWithFrame:CGRectZero];
+ _videoCallView.delegate = self;
+ _videoCallView.statusLabel.text =
+ [self statusTextForState:RTCIceConnectionStateNew];
+ self.view = _videoCallView;
+
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session addDelegate:self];
+}
+
+- (UIInterfaceOrientationMask)supportedInterfaceOrientations {
+ return UIInterfaceOrientationMaskAll;
+}
+
+#pragma mark - ARDAppClientDelegate
+
+- (void)appClient:(ARDAppClient *)client
+ didChangeState:(ARDAppClientState)state {
+ switch (state) {
+ case kARDAppClientStateConnected:
+ RTCLog(@"Client connected.");
+ break;
+ case kARDAppClientStateConnecting:
+ RTCLog(@"Client connecting.");
+ break;
+ case kARDAppClientStateDisconnected:
+ RTCLog(@"Client disconnected.");
+ [self hangup];
+ break;
+ }
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didChangeConnectionState:(RTCIceConnectionState)state {
+ RTCLog(@"ICE state changed: %ld", (long)state);
+ __weak ARDVideoCallViewController *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDVideoCallViewController *strongSelf = weakSelf;
+ strongSelf.videoCallView.statusLabel.text =
+ [strongSelf statusTextForState:state];
+ });
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+ _videoCallView.localVideoView.captureSession = localCapturer.captureSession;
+ ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+ _captureController =
+ [[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settingsModel];
+ [_captureController startCapture];
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 10, *)) {
+ _fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
+ [_fileCaptureController startCapture];
+ }
+#endif
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+ self.remoteVideoTrack = remoteVideoTrack;
+ __weak ARDVideoCallViewController *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDVideoCallViewController *strongSelf = weakSelf;
+ strongSelf.videoCallView.statusLabel.hidden = YES;
+ });
+}
+
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+ _videoCallView.statsView.stats = stats;
+ [_videoCallView setNeedsLayout];
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didError:(NSError *)error {
+ NSString *message =
+ [NSString stringWithFormat:@"%@", error.localizedDescription];
+ [self hangup];
+ [self showAlertWithMessage:message];
+}
+
+#pragma mark - ARDVideoCallViewDelegate
+
+- (void)videoCallViewDidHangup:(ARDVideoCallView *)view {
+ [self hangup];
+}
+
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldSwitchCameraWithCompletion:(void (^)(NSError *))completion {
+ [_captureController switchCamera:completion];
+}
+
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldChangeRouteWithCompletion:(void (^)(void))completion {
+ NSParameterAssert(completion);
+ AVAudioSessionPortOverride override = AVAudioSessionPortOverrideNone;
+ if (_portOverride == AVAudioSessionPortOverrideNone) {
+ override = AVAudioSessionPortOverrideSpeaker;
+ }
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession
+ block:^{
+ RTC_OBJC_TYPE(RTCAudioSession) *session =
+ [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ NSError *error = nil;
+ if ([session overrideOutputAudioPort:override
+ error:&error]) {
+ self.portOverride = override;
+ } else {
+ RTCLogError(@"Error overriding output port: %@",
+ error.localizedDescription);
+ }
+ [session unlockForConfiguration];
+ completion();
+ }];
+}
+
+- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view {
+ _client.shouldGetStats = YES;
+ _videoCallView.statsView.hidden = NO;
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
+ RTCLog(@"Audio session detected glitch, total: %lld", totalNumberOfGlitches);
+}
+
+#pragma mark - Private
+
+- (void)setRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+ if (_remoteVideoTrack == remoteVideoTrack) {
+ return;
+ }
+ [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
+ _remoteVideoTrack = nil;
+ [_videoCallView.remoteVideoView renderFrame:nil];
+ _remoteVideoTrack = remoteVideoTrack;
+ [_remoteVideoTrack addRenderer:_videoCallView.remoteVideoView];
+}
+
+- (void)hangup {
+ self.remoteVideoTrack = nil;
+ _videoCallView.localVideoView.captureSession = nil;
+ [_captureController stopCapture];
+ _captureController = nil;
+ [_fileCaptureController stopCapture];
+ _fileCaptureController = nil;
+ [_client disconnect];
+ [_delegate viewControllerDidFinish:self];
+}
+
+- (NSString *)statusTextForState:(RTCIceConnectionState)state {
+ switch (state) {
+ case RTCIceConnectionStateNew:
+ case RTCIceConnectionStateChecking:
+ return @"Connecting...";
+ case RTCIceConnectionStateConnected:
+ case RTCIceConnectionStateCompleted:
+ case RTCIceConnectionStateFailed:
+ case RTCIceConnectionStateDisconnected:
+ case RTCIceConnectionStateClosed:
+ case RTCIceConnectionStateCount:
+ return nil;
+ }
+}
+
+- (void)showAlertWithMessage:(NSString*)message {
+ UIAlertController *alert =
+ [UIAlertController alertControllerWithTitle:nil
+ message:message
+ preferredStyle:UIAlertControllerStyleAlert];
+
+ UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
+ style:UIAlertActionStyleDefault
+ handler:^(UIAlertAction *action){
+ }];
+
+ [alert addAction:defaultAction];
+ [self presentViewController:alert animated:YES completion:nil];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist
new file mode 100644
index 0000000000..a2f0a683ed
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>BuildMachineOSBuild</key>
+ <string>12E55</string>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCMobile</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIcons</key>
+ <dict>
+ <key>CFBundlePrimaryIcon</key>
+ <dict>
+ <key>CFBundleIconFiles</key>
+ <array>
+ <string>Icon.png</string>
+ <string>Icon-120.png</string>
+ <string>Icon-180.png</string>
+ </array>
+ </dict>
+ </dict>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCMobile</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleSupportedPlatforms</key>
+ <array>
+ <string>iPhoneOS</string>
+ </array>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>UIStatusBarTintParameters</key>
+ <dict>
+ <key>UINavigationBar</key>
+ <dict>
+ <key>Style</key>
+ <string>UIBarStyleDefault</string>
+ <key>Translucent</key>
+ <false/>
+ </dict>
+ </dict>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ </array>
+ <key>UIAppFonts</key>
+ <array>
+ <string>Roboto-Regular.ttf</string>
+ </array>
+ <key>UIBackgroundModes</key>
+ <array>
+ <string>audio</string>
+ <string>voip</string>
+ </array>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+ <key>UIFileSharingEnabled</key>
+ <true/>
+ <key>UILaunchImages</key>
+ <array>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone5</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{320, 568}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone6</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{375, 667}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone6p</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{414, 736}</string>
+ </dict>
+ </array>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h
new file mode 100644
index 0000000000..3a93c253b2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/base/RTCVideoCodecInfo.h"
+
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(HumanReadable)
+
+ - (NSString *)humanReadableDescription;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m
new file mode 100644
index 0000000000..5e0c52c5c4
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCodecInfo+HumanReadable.h"
+
+#import "sdk/objc/components/video_codec/RTCH264ProfileLevelId.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(HumanReadable)
+
+ - (NSString *)humanReadableDescription {
+ if ([self.name isEqualToString:@"H264"]) {
+ NSString *profileId = self.parameters[@"profile-level-id"];
+ RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId];
+ if (profileLevelId.profile == RTCH264ProfileConstrainedHigh ||
+ profileLevelId.profile == RTCH264ProfileHigh) {
+ return @"H264 (High)";
+ } else if (profileLevelId.profile == RTCH264ProfileConstrainedBaseline ||
+ profileLevelId.profile == RTCH264ProfileBaseline) {
+ return @"H264 (Baseline)";
+ } else {
+ return [NSString stringWithFormat:@"H264 (%@)", profileId];
+ }
+ } else {
+ return self.name;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h
new file mode 100644
index 0000000000..d56ba02c2e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface UIImage (ARDUtilities)
+
+// Returns a color-tinted version of the given image resource.
++ (UIImage *)imageForName:(NSString *)name color:(UIColor *)color;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m
new file mode 100644
index 0000000000..1bbe8c342f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "UIImage+ARDUtilities.h"
+
+@implementation UIImage (ARDUtilities)
+
++ (UIImage *)imageForName:(NSString *)name color:(UIColor *)color {
+ UIImage *image = [UIImage imageNamed:name];
+ if (!image) {
+ return nil;
+ }
+ UIGraphicsBeginImageContextWithOptions(image.size, NO, 0.0f);
+ [color setFill];
+ CGRect bounds = CGRectMake(0, 0, image.size.width, image.size.height);
+ UIRectFill(bounds);
+ [image drawInRect:bounds blendMode:kCGBlendModeDestinationIn alpha:1.0f];
+ UIImage *coloredImage = UIGraphicsGetImageFromCurrentImageContext();
+ UIGraphicsEndImageContext();
+
+ return coloredImage;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h
new file mode 100644
index 0000000000..2c4a56368a
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <ReplayKit/ReplayKit.h>
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDAppClient.h"
+
+@protocol ARDExternalSampleDelegate;
+
+API_AVAILABLE(ios(10.0))
+@interface ARDBroadcastSampleHandler : RPBroadcastSampleHandler <ARDAppClientDelegate>
+
+@property(nonatomic, strong) id<ARDExternalSampleDelegate> capturer;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
new file mode 100644
index 0000000000..1c276d965f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDBroadcastSampleHandler.h"
+
+#import <os/log.h>
+
+#import "ARDExternalSampleCapturer.h"
+#import "ARDSettingsModel.h"
+
+#import "sdk/objc/api/logging/RTCCallbackLogger.h"
+#import "sdk/objc/base/RTCLogging.h"
+
+@implementation ARDBroadcastSampleHandler {
+  ARDAppClient *_client;
+  RTC_OBJC_TYPE(RTCCallbackLogger) * _callbackLogger;
+}
+
+@synthesize capturer = _capturer;
+
+// Installs a callback logger that forwards WebRTC log lines to os_log, since a
+// broadcast extension has no console of its own.
+- (instancetype)init {
+  if (self = [super init]) {
+    _callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
+    os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
+    [_callbackLogger start:^(NSString *logMessage) {
+      os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
+    }];
+  }
+  return self;
+}
+
+- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
+  // User has requested to start the broadcast. Setup info from the UI extension can be supplied but
+  // optional.
+  ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+
+  _client = [[ARDAppClient alloc] initWithDelegate:self];
+  _client.broadcast = YES;
+
+  // Use the room name provided by the setup UI when present; otherwise fall
+  // back to a random "broadcast_NNN" room so the broadcast can still start.
+  NSString *roomName = nil;
+  if (setupInfo[@"roomName"]) {
+    roomName = (NSString *)setupInfo[@"roomName"];
+  } else {
+    u_int32_t randomRoomSuffix = arc4random_uniform(1000);
+    roomName = [NSString stringWithFormat:@"broadcast_%d", randomRoomSuffix];
+  }
+  [_client connectToRoomWithId:roomName settings:settingsModel isLoopback:NO];
+  RTCLog(@"Broadcast started.");
+}
+
+- (void)broadcastPaused {
+  // User has requested to pause the broadcast. Samples will stop being delivered.
+}
+
+- (void)broadcastResumed {
+  // User has requested to resume the broadcast. Samples delivery will resume.
+}
+
+- (void)broadcastFinished {
+  // User has requested to finish the broadcast.
+  [_client disconnect];
+}
+
+// Only video buffers are forwarded to the capturer; app and mic audio buffers
+// are intentionally dropped.
+- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
+                   withType:(RPSampleBufferType)sampleBufferType {
+  switch (sampleBufferType) {
+    case RPSampleBufferTypeVideo:
+      [self.capturer didCaptureSampleBuffer:sampleBuffer];
+      break;
+    case RPSampleBufferTypeAudioApp:
+      break;
+    case RPSampleBufferTypeAudioMic:
+      break;
+    default:
+      break;
+  }
+}
+
+#pragma mark - ARDAppClientDelegate
+
+- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state {
+  switch (state) {
+    case kARDAppClientStateConnected:
+      RTCLog(@"Client connected.");
+      break;
+    case kARDAppClientStateConnecting:
+      // Fix: RTCLog takes an NSString format literal; the previous plain C
+      // string ("Client connecting.") was not an NSString and did not match
+      // the sibling calls in this switch.
+      RTCLog(@"Client connecting.");
+      break;
+    case kARDAppClientStateDisconnected:
+      RTCLog(@"Client disconnected.");
+      break;
+  }
+}
+
+- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state {
+  RTCLog(@"ICE state changed: %ld", (long)state);
+}
+
+- (void)appClient:(ARDAppClient *)client
+    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+}
+
+// Hook the externally driven sample capturer up as the sink for the ReplayKit
+// video buffers delivered in -processSampleBuffer:withType:.
+- (void)appClient:(ARDAppClient *)client
+    didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer {
+  self.capturer = externalSampleCapturer;
+}
+
+- (void)appClient:(ARDAppClient *)client
+    didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
+}
+
+- (void)appClient:(ARDAppClient *)client
+    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+}
+
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+}
+
+- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
+  RTCLog(@"Error: %@", error);
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h
new file mode 100644
index 0000000000..bbf397d8a9
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <ReplayKit/ReplayKit.h>
+#import <UIKit/UIKit.h>
+
+// Setup UI for the broadcast extension: lets the user enter a room name before
+// the broadcast starts (see BroadcastSetupUIInfo.plist for registration).
+API_AVAILABLE(ios(11.0))
+@interface ARDBroadcastSetupViewController : UIViewController <UITextFieldDelegate>
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m
new file mode 100644
index 0000000000..55438f17d8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDBroadcastSetupViewController.h"
+
+@implementation ARDBroadcastSetupViewController {
+  // Text field where the user types the AppRTC room name for the broadcast.
+  UITextField *_roomNameField;
+}
+
+// Builds the setup sheet programmatically: app icon, room-name field, and
+// Done/Cancel buttons, all laid out with Auto Layout anchors.
+- (void)loadView {
+  UIView *view = [[UIView alloc] initWithFrame:CGRectZero];
+  view.backgroundColor = [UIColor colorWithWhite:1.0 alpha:0.7];
+
+  // NOTE(review): assumes an "Icon-180" image is bundled with the extension —
+  // confirm the asset name against the extension target's resources.
+  UIImageView *imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
+  imageView.translatesAutoresizingMaskIntoConstraints = NO;
+  [view addSubview:imageView];
+
+  _roomNameField = [[UITextField alloc] initWithFrame:CGRectZero];
+  _roomNameField.borderStyle = UITextBorderStyleRoundedRect;
+  _roomNameField.font = [UIFont systemFontOfSize:14.0];
+  _roomNameField.translatesAutoresizingMaskIntoConstraints = NO;
+  _roomNameField.placeholder = @"Room name";
+  _roomNameField.returnKeyType = UIReturnKeyDone;
+  _roomNameField.delegate = self;
+  [view addSubview:_roomNameField];
+
+  UIButton *doneButton = [UIButton buttonWithType:UIButtonTypeSystem];
+  doneButton.translatesAutoresizingMaskIntoConstraints = NO;
+  doneButton.titleLabel.font = [UIFont systemFontOfSize:20.0];
+  [doneButton setTitle:@"Done" forState:UIControlStateNormal];
+  [doneButton addTarget:self
+                 action:@selector(userDidFinishSetup)
+       forControlEvents:UIControlEventTouchUpInside];
+  [view addSubview:doneButton];
+
+  UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
+  cancelButton.translatesAutoresizingMaskIntoConstraints = NO;
+  cancelButton.titleLabel.font = [UIFont systemFontOfSize:20.0];
+  [cancelButton setTitle:@"Cancel" forState:UIControlStateNormal];
+  [cancelButton addTarget:self
+                   action:@selector(userDidCancelSetup)
+         forControlEvents:UIControlEventTouchUpInside];
+  [view addSubview:cancelButton];
+
+  // Anchor layout: icon centered on top, room field below it spanning the
+  // margins, Done bottom-left and Cancel bottom-right.
+  UILayoutGuide *margin = view.layoutMarginsGuide;
+  [imageView.widthAnchor constraintEqualToConstant:60.0].active = YES;
+  [imageView.heightAnchor constraintEqualToConstant:60.0].active = YES;
+  [imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20].active = YES;
+  [imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES;
+
+  [_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
+  [_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor constant:20].active =
+      YES;
+  [_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
+
+  [doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
+  [doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
+
+  [cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
+  [cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
+
+  // Tapping anywhere outside the text field dismisses the keyboard.
+  UITapGestureRecognizer *tgr =
+      [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)];
+  [view addGestureRecognizer:tgr];
+
+  self.view = view;
+}
+
+- (IBAction)didTap:(id)sender {
+  [self.view endEditing:YES];
+}
+
+- (void)userDidFinishSetup {
+  // URL of the resource where broadcast can be viewed that will be returned to the application.
+  NSURL *broadcastURL = [NSURL
+      URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", _roomNameField.text]];
+
+  // Dictionary with setup information that will be provided to broadcast extension when broadcast
+  // is started. The "roomName" key is read back in
+  // -[ARDBroadcastSampleHandler broadcastStartedWithSetupInfo:].
+  NSDictionary *setupInfo = @{@"roomName" : _roomNameField.text};
+
+  // Tell ReplayKit that the extension is finished setting up and can begin broadcasting.
+  [self.extensionContext completeRequestWithBroadcastURL:broadcastURL setupInfo:setupInfo];
+}
+
+- (void)userDidCancelSetup {
+  // Tell ReplayKit that the extension was cancelled by the user.
+  [self.extensionContext cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
+                                                                    code:-1
+                                                                userInfo:nil]];
+}
+
+#pragma mark - UITextFieldDelegate
+
+// Pressing Return in the room field is equivalent to tapping Done.
+- (BOOL)textFieldShouldReturn:(UITextField *)textField {
+  [self userDidFinishSetup];
+  return YES;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist
new file mode 100644
index 0000000000..a123c111e5
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCMobile</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCMobile.BroadcastSetupUI</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>XPC!</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSExtension</key>
+ <dict>
+ <key>NSExtensionAttributes</key>
+ <dict>
+ <key>NSExtensionActivationRule</key>
+ <dict>
+ <key>NSExtensionActivationSupportsReplayKitStreaming</key>
+ <true/>
+ </dict>
+ </dict>
+ <key>NSExtensionPointIdentifier</key>
+ <string>com.apple.broadcast-services-setupui</string>
+ <key>NSExtensionPrincipalClass</key>
+ <string>ARDBroadcastSetupViewController</string>
+ </dict>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist
new file mode 100644
index 0000000000..2bab60ea8f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCMobile</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCMobile.BroadcastUpload</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>XPC!</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSExtension</key>
+ <dict>
+ <key>NSExtensionPointIdentifier</key>
+ <string>com.apple.broadcast-services-upload</string>
+ <key>NSExtensionPrincipalClass</key>
+ <string>ARDBroadcastSampleHandler</string>
+ <key>RPBroadcastProcessMode</key>
+ <string>RPBroadcastProcessModeSampleBuffer</string>
+ </dict>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m
new file mode 100644
index 0000000000..00b83f7fd2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#import "ARDAppDelegate.h"
+
+// iOS app entry point: hands control to UIKit with ARDAppDelegate as the
+// application delegate.
+int main(int argc, char* argv[]) {
+  @autoreleasepool {
+    return UIApplicationMain(
+        argc, argv, nil, NSStringFromClass([ARDAppDelegate class]));
+  }
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf
new file mode 100644
index 0000000000..0e58508a64
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4 b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4
new file mode 100644
index 0000000000..ccffbf4722
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png
new file mode 100644
index 0000000000..9d005fde06
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png
new file mode 100644
index 0000000000..fce3eb95b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png
new file mode 100644
index 0000000000..aee20c2209
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png
new file mode 100644
index 0000000000..531cb0f280
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png
new file mode 100644
index 0000000000..03dd381c10
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png
new file mode 100644
index 0000000000..4ebf8a2270
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png
new file mode 100644
index 0000000000..ed2b2525fd
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png
new file mode 100644
index 0000000000..c59419c02b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png
new file mode 100644
index 0000000000..e84e188a1d
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png
new file mode 100644
index 0000000000..8f3343d3a7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png
new file mode 100644
index 0000000000..764880467a
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png
new file mode 100644
index 0000000000..85271c8253
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png
new file mode 100644
index 0000000000..62b13a6a09
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3 b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3
new file mode 100644
index 0000000000..5981ba3a91
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h
new file mode 100644
index 0000000000..2b3ce094a2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+// macOS application delegate for the AppRTC demo; owns the main window.
+@interface APPRTCAppDelegate : NSObject <NSApplicationDelegate>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m
new file mode 100644
index 0000000000..36a470021d
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "APPRTCAppDelegate.h"
+#import "APPRTCViewController.h"
+#import "sdk/objc/api/peerconnection/RTCSSLAdapter.h"
+
+// Private conformance so the delegate can observe its own window's close event.
+@interface APPRTCAppDelegate () <NSWindowDelegate>
+@end
+
+@implementation APPRTCAppDelegate {
+  APPRTCViewController* _viewController;
+  NSWindow* _window;
+}
+
+#pragma mark - NSApplicationDelegate
+
+// Initializes WebRTC's SSL layer, then creates the main window and installs
+// the APPRTCViewController's view as its content.
+- (void)applicationDidFinishLaunching:(NSNotification*)notification {
+  RTCInitializeSSL();
+  NSScreen* screen = [NSScreen mainScreen];
+  NSRect visibleRect = [screen visibleFrame];
+  NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
+                                 NSMidY(visibleRect),
+                                 1320,
+                                 1140);
+  NSUInteger styleMask = NSWindowStyleMaskTitled | NSWindowStyleMaskClosable;
+  _window = [[NSWindow alloc] initWithContentRect:windowRect
+                                        styleMask:styleMask
+                                          backing:NSBackingStoreBuffered
+                                            defer:NO];
+  _window.delegate = self;
+  [_window makeKeyAndOrderFront:self];
+  [_window makeMainWindow];
+  _viewController = [[APPRTCViewController alloc] initWithNibName:nil
+                                                           bundle:nil];
+  [_window setContentView:[_viewController view]];
+}
+
+#pragma mark - NSWindow
+
+// Closing the window tears down the call, cleans up SSL, and quits the app.
+- (void)windowWillClose:(NSNotification*)notification {
+  [_viewController windowWillClose:notification];
+  RTCCleanupSSL();
+  [NSApp terminate:self];
+}
+
+@end
+
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h
new file mode 100644
index 0000000000..306ecd9c7f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AppKit/AppKit.h>
+
+// Main view controller of the macOS AppRTC demo: hosts the call UI and
+// forwards window-close notifications so the call can be torn down.
+@interface APPRTCViewController : NSViewController
+
+- (void)windowWillClose:(NSNotification*)notification;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
new file mode 100644
index 0000000000..982fa56b43
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
@@ -0,0 +1,407 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "APPRTCViewController.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
+#import "sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h"
+
+#import "ARDAppClient.h"
+#import "ARDCaptureController.h"
+#import "ARDSettingsModel.h"
+
+// Layout metrics (points) used by APPRTCMainView's visual-format constraints.
+static NSUInteger const kContentWidth = 900;
+static NSUInteger const kRoomFieldWidth = 200;
+static NSUInteger const kActionItemHeight = 30;
+static NSUInteger const kBottomViewHeight = 200;
+
+@class APPRTCMainView;
+// Informs the controller that the user submitted a room id (optionally in
+// loopback mode) via the Start-call button.
+@protocol APPRTCMainViewDelegate
+
+- (void)appRTCMainView:(APPRTCMainView*)mainView
+        didEnterRoomId:(NSString*)roomId
+              loopback:(BOOL)isLoopback;
+
+@end
+
+// Root view: remote video on top, log scroll view and room/connect controls
+// at the bottom.
+@interface APPRTCMainView : NSView
+
+@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
+@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
+@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
+@property(nonatomic, readonly) NSTextView* logView;
+
+// Appends a line to the log view; safe to call from any thread.
+- (void)displayLogMessage:(NSString*)message;
+
+@end
+
+@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCVideoViewDelegate)>
+@end
+@implementation APPRTCMainView {
+ NSScrollView* _scrollView;
+ NSView* _actionItemsView;
+ NSButton* _connectButton;
+ NSButton* _loopbackButton;
+ NSTextField* _roomField;
+ CGSize _localVideoSize;
+ CGSize _remoteVideoSize;
+}
+
+@synthesize delegate = _delegate;
+@synthesize localVideoView = _localVideoView;
+@synthesize remoteVideoView = _remoteVideoView;
+@synthesize logView = _logView;
+
+- (void)displayLogMessage:(NSString *)message {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
+ NSRange range = NSMakeRange(self.logView.string.length, 0);
+ [self.logView scrollRangeToVisible:range];
+ });
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithFrame:(NSRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ [self setupViews];
+ }
+ return self;
+}
+
++ (BOOL)requiresConstraintBasedLayout {
+ return YES;
+}
+
+- (void)updateConstraints {
+ NSParameterAssert(
+ _roomField != nil &&
+ _scrollView != nil &&
+ _remoteVideoView != nil &&
+ _localVideoView != nil &&
+ _actionItemsView!= nil &&
+ _connectButton != nil &&
+ _loopbackButton != nil);
+
+ [self removeConstraints:[self constraints]];
+ NSDictionary* viewsDictionary =
+ NSDictionaryOfVariableBindings(_roomField,
+ _scrollView,
+ _remoteVideoView,
+ _localVideoView,
+ _actionItemsView,
+ _connectButton,
+ _loopbackButton);
+
+ NSSize remoteViewSize = [self remoteVideoViewSize];
+ NSDictionary* metrics = @{
+ @"remoteViewWidth" : @(remoteViewSize.width),
+ @"remoteViewHeight" : @(remoteViewSize.height),
+ @"kBottomViewHeight" : @(kBottomViewHeight),
+ @"localViewHeight" : @(remoteViewSize.height / 3),
+ @"localViewWidth" : @(remoteViewSize.width / 3),
+ @"kRoomFieldWidth" : @(kRoomFieldWidth),
+ @"kActionItemHeight" : @(kActionItemHeight)
+ };
+ // Declare this separately to avoid compiler warning about splitting string
+ // within an NSArray expression.
+ NSString* verticalConstraintLeft =
+ @"V:|-[_remoteVideoView(remoteViewHeight)]-[_scrollView(kBottomViewHeight)]-|";
+ NSString* verticalConstraintRight =
+ @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(kBottomViewHeight)]-|";
+ NSArray* constraintFormats = @[
+ verticalConstraintLeft,
+ verticalConstraintRight,
+ @"H:|-[_remoteVideoView(remoteViewWidth)]-|",
+ @"V:|-[_localVideoView(localViewHeight)]",
+ @"H:|-[_localVideoView(localViewWidth)]",
+ @"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|"
+ ];
+
+ NSArray* actionItemsConstraints = @[
+ @"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]",
+ @"H:|-[_connectButton(kRoomFieldWidth)]",
+ @"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]",
+ @"V:|-[_loopbackButton(kActionItemHeight)]",
+ ];
+
+ [APPRTCMainView addConstraints:constraintFormats
+ toView:self
+ viewsDictionary:viewsDictionary
+ metrics:metrics];
+ [APPRTCMainView addConstraints:actionItemsConstraints
+ toView:_actionItemsView
+ viewsDictionary:viewsDictionary
+ metrics:metrics];
+ [super updateConstraints];
+}
+
+#pragma mark - Constraints helper
+
++ (void)addConstraints:(NSArray*)constraints toView:(NSView*)view
+ viewsDictionary:(NSDictionary*)viewsDictionary
+ metrics:(NSDictionary*)metrics {
+ for (NSString* constraintFormat in constraints) {
+ NSArray* constraints =
+ [NSLayoutConstraint constraintsWithVisualFormat:constraintFormat
+ options:0
+ metrics:metrics
+ views:viewsDictionary];
+ for (NSLayoutConstraint* constraint in constraints) {
+ [view addConstraint:constraint];
+ }
+ }
+}
+
+#pragma mark - Control actions
+
+- (void)startCall:(id)sender {
+ NSString* roomString = _roomField.stringValue;
+ // Generate room id for loopback options.
+ if (_loopbackButton.intValue && [roomString isEqualToString:@""]) {
+ roomString = [NSUUID UUID].UUIDString;
+ roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
+ }
+ [self.delegate appRTCMainView:self
+ didEnterRoomId:roomString
+ loopback:_loopbackButton.intValue];
+ [self setNeedsUpdateConstraints:YES];
+}
+
+#pragma mark - RTCVideoViewDelegate
+
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
+ if (videoView == _remoteVideoView) {
+ _remoteVideoSize = size;
+ } else if (videoView == _localVideoView) {
+ _localVideoSize = size;
+ } else {
+ return;
+ }
+
+ [self setNeedsUpdateConstraints:YES];
+}
+
+#pragma mark - Private
+
+- (void)setupViews {
+ NSParameterAssert([[self subviews] count] == 0);
+
+ _logView = [[NSTextView alloc] initWithFrame:NSZeroRect];
+ [_logView setMinSize:NSMakeSize(0, kBottomViewHeight)];
+ [_logView setMaxSize:NSMakeSize(FLT_MAX, FLT_MAX)];
+ [_logView setVerticallyResizable:YES];
+ [_logView setAutoresizingMask:NSViewWidthSizable];
+ NSTextContainer* textContainer = [_logView textContainer];
+ NSSize containerSize = NSMakeSize(kContentWidth, FLT_MAX);
+ [textContainer setContainerSize:containerSize];
+ [textContainer setWidthTracksTextView:YES];
+ [_logView setEditable:NO];
+
+ [self setupActionItemsView];
+
+ _scrollView = [[NSScrollView alloc] initWithFrame:NSZeroRect];
+ [_scrollView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [_scrollView setHasVerticalScroller:YES];
+ [_scrollView setDocumentView:_logView];
+ [self addSubview:_scrollView];
+
+ _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+ _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+
+ [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [self addSubview:_remoteVideoView];
+ [_localVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [self addSubview:_localVideoView];
+}
+
+// Builds the action bar (room-id field, "Start call" button, "Loopback"
+// checkbox) and adds it to the view hierarchy. Every control is created with
+// a zero frame and opts out of autoresizing-mask translation; Auto Layout
+// constraints are installed elsewhere.
+- (void)setupActionItemsView {
+  _actionItemsView = [[NSView alloc] initWithFrame:NSZeroRect];
+  [_actionItemsView setTranslatesAutoresizingMaskIntoConstraints:NO];
+  [self addSubview:_actionItemsView];
+
+  // Text field where the user types the AppRTC room id.
+  _roomField = [[NSTextField alloc] initWithFrame:NSZeroRect];
+  [_roomField setTranslatesAutoresizingMaskIntoConstraints:NO];
+  [[_roomField cell] setPlaceholderString: @"Enter AppRTC room id"];
+  [_actionItemsView addSubview:_roomField];
+  [_roomField setEditable:YES];
+
+  // Push button that triggers -startCall: on this view.
+  _connectButton = [[NSButton alloc] initWithFrame:NSZeroRect];
+  [_connectButton setTranslatesAutoresizingMaskIntoConstraints:NO];
+  _connectButton.title = @"Start call";
+  _connectButton.bezelStyle = NSBezelStyleSmallSquare;
+  _connectButton.target = self;
+  _connectButton.action = @selector(startCall:);
+  [_actionItemsView addSubview:_connectButton];
+
+  // Checkbox-style toggle selecting a loopback call.
+  _loopbackButton = [[NSButton alloc] initWithFrame:NSZeroRect];
+  [_loopbackButton setTranslatesAutoresizingMaskIntoConstraints:NO];
+  _loopbackButton.title = @"Loopback";
+  [_loopbackButton setButtonType:NSButtonTypeSwitch];
+  [_actionItemsView addSubview:_loopbackButton];
+}
+
+// Preferred size for the remote video view: at least kContentWidth wide with
+// a 16:9 aspect ratio derived from the width. While the view still has a
+// zero-width bounds (nothing laid out yet) a kContentWidth x 0 placeholder is
+// returned instead.
+- (NSSize)remoteVideoViewSize {
+  if (!_remoteVideoView.bounds.size.width) {
+    return NSMakeSize(kContentWidth, 0);
+  }
+  NSInteger width = MAX(_remoteVideoView.bounds.size.width, kContentWidth);
+  // Integer arithmetic: width is truncated to a multiple of 16 before
+  // computing the 9/16 height.
+  NSInteger height = (width/16) * 9;
+  return NSMakeSize(width, height);
+}
+
+@end
+
+@interface APPRTCViewController ()
+    <ARDAppClientDelegate, APPRTCMainViewDelegate>
+@property(nonatomic, readonly) APPRTCMainView* mainView;
+@end
+
+@implementation APPRTCViewController {
+  // Active AppRTC client; nil while no call is in progress.
+  ARDAppClient* _client;
+  RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
+  RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
+  // Drives camera capture for the duration of a call.
+  ARDCaptureController* _captureController;
+}
+
+- (void)dealloc {
+  [self disconnect];
+}
+
+// Show usage instructions once the view is on screen.
+- (void)viewDidAppear {
+  [super viewDidAppear];
+  [self displayUsageInstructions];
+}
+
+// Programmatic view setup; no nib is used.
+- (void)loadView {
+  APPRTCMainView* view = [[APPRTCMainView alloc] initWithFrame:NSZeroRect];
+  [view setTranslatesAutoresizingMaskIntoConstraints:NO];
+  view.delegate = self;
+  self.view = view;
+}
+
+// Tears down any active call when the hosting window closes.
+- (void)windowWillClose:(NSNotification*)notification {
+  [self disconnect];
+}
+
+#pragma mark - Usage
+
+- (void)displayUsageInstructions {
+  // NOTE(review): "neccessary" typo in the user-visible string below — fix
+  // upstream (string left untouched here).
+  [self.mainView displayLogMessage:
+      @"To start call:\n"
+      @"• Enter AppRTC room id (not neccessary for loopback)\n"
+      @"• Start call"];
+}
+
+#pragma mark - ARDAppClientDelegate
+
+// Logs signaling-state transitions; on disconnect also resets the video UI
+// and releases the client.
+- (void)appClient:(ARDAppClient *)client
+    didChangeState:(ARDAppClientState)state {
+  switch (state) {
+    case kARDAppClientStateConnected:
+      [self.mainView displayLogMessage:@"Client connected."];
+      break;
+    case kARDAppClientStateConnecting:
+      [self.mainView displayLogMessage:@"Client connecting."];
+      break;
+    case kARDAppClientStateDisconnected:
+      [self.mainView displayLogMessage:@"Client disconnected."];
+      [self resetUI];
+      _client = nil;
+      break;
+  }
+}
+
+// ICE connection-state changes are intentionally not surfaced in the mac UI.
+- (void)appClient:(ARDAppClient *)client
+    didChangeConnectionState:(RTCIceConnectionState)state {
+}
+
+// Starts camera capture as soon as the client hands us a capturer.
+- (void)appClient:(ARDAppClient*)client
+    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+  _captureController =
+      [[ARDCaptureController alloc] initWithCapturer:localCapturer
+                                            settings:[[ARDSettingsModel alloc] init]];
+  [_captureController startCapture];
+}
+
+- (void)appClient:(ARDAppClient*)client
+    didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
+  _localVideoTrack = localVideoTrack;
+  [_localVideoTrack addRenderer:self.mainView.localVideoView];
+}
+
+- (void)appClient:(ARDAppClient*)client
+    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+  _remoteVideoTrack = remoteVideoTrack;
+  [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
+}
+
+// Any client error aborts the call after informing the user.
+- (void)appClient:(ARDAppClient *)client
+         didError:(NSError *)error {
+  [self showAlertWithMessage:[NSString stringWithFormat:@"%@", error]];
+  [self disconnect];
+}
+
+// Stats reporting is unused in the mac app.
+- (void)appClient:(ARDAppClient *)client
+      didGetStats:(NSArray *)stats {
+}
+
+#pragma mark - APPRTCMainViewDelegate
+
+- (void)appRTCMainView:(APPRTCMainView*)mainView
+        didEnterRoomId:(NSString*)roomId
+              loopback:(BOOL)isLoopback {
+
+  if ([roomId isEqualToString:@""]) {
+    [self.mainView displayLogMessage:@"Missing room id"];
+    return;
+  }
+
+  // Drop any existing call before starting a new one.
+  [self disconnect];
+  ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self];
+  [client connectToRoomWithId:roomId
+                     settings:[[ARDSettingsModel alloc] init] // Use default settings.
+                   isLoopback:isLoopback];
+  _client = client;
+}
+
+#pragma mark - Private
+
+// self.view is always an APPRTCMainView (see -loadView).
+- (APPRTCMainView*)mainView {
+  return (APPRTCMainView*)self.view;
+}
+
+- (void)showAlertWithMessage:(NSString*)message {
+  // Hop to the main queue before presenting UI.
+  dispatch_async(dispatch_get_main_queue(), ^{
+    NSAlert* alert = [[NSAlert alloc] init];
+    [alert setMessageText:message];
+    [alert runModal];
+  });
+}
+
+// Detaches renderers from both tracks and blanks both video views.
+- (void)resetUI {
+  [_remoteVideoTrack removeRenderer:self.mainView.remoteVideoView];
+  [_localVideoTrack removeRenderer:self.mainView.localVideoView];
+  _remoteVideoTrack = nil;
+  _localVideoTrack = nil;
+  [self.mainView.remoteVideoView renderFrame:nil];
+  [self.mainView.localVideoView renderFrame:nil];
+}
+
+// Safe to call repeatedly; messaging a nil _captureController/_client is a
+// no-op.
+- (void)disconnect {
+  [self resetUI];
+  [_captureController stopCapture];
+  _captureController = nil;
+  [_client disconnect];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist
new file mode 100644
index 0000000000..d2970eba74
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.Google.${PRODUCT_NAME:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+</dict>
+</plist> \ No newline at end of file
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m
new file mode 100644
index 0000000000..79b17f5492
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AppKit/AppKit.h>
+
+#import "APPRTCAppDelegate.h"
+
+// App entry point: standard Cocoa bootstrap — create the shared
+// NSApplication, install the app delegate, and enter the main run loop.
+int main(int argc, char* argv[]) {
+  @autoreleasepool {
+    [NSApplication sharedApplication];
+    // NSApp's delegate is not retained by AppKit; the strong local here keeps
+    // it alive, and -run does not return until the app terminates.
+    APPRTCAppDelegate* delegate = [[APPRTCAppDelegate alloc] init];
+    [NSApp setDelegate:delegate];
+    [NSApp run];
+  }
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm
new file mode 100644
index 0000000000..2694e49914
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm
@@ -0,0 +1,266 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <QuartzCore/CoreAnimation.h>
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/ssl_adapter.h"
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h"
+
+#import "ARDAppClient+Internal.h"
+#import "ARDJoinResponse+Internal.h"
+#import "ARDMessageResponse+Internal.h"
+#import "ARDSettingsModel.h"
+
+@interface ARDAppClientTest : XCTestCase
+@end
+
+@implementation ARDAppClientTest
+
+#pragma mark - Mock helpers
+
+// Builds a mock ARDRoomServerClient that immediately reports a successful
+// join (with the supplied room/client ids and queued messages), relays every
+// sent message to `messageHandler` before acknowledging it, and acks leave
+// requests.
+- (id)mockRoomServerClientForRoomId:(NSString *)roomId
+                           clientId:(NSString *)clientId
+                        isInitiator:(BOOL)isInitiator
+                           messages:(NSArray *)messages
+                     messageHandler:
+                         (void (^)(ARDSignalingMessage *))messageHandler {
+  id mockRoomServerClient =
+      [OCMockObject mockForProtocol:@protocol(ARDRoomServerClient)];
+
+  // Successful join response.
+  ARDJoinResponse *joinResponse = [[ARDJoinResponse alloc] init];
+  joinResponse.result = kARDJoinResultTypeSuccess;
+  joinResponse.roomId = roomId;
+  joinResponse.clientId = clientId;
+  joinResponse.isInitiator = isInitiator;
+  joinResponse.messages = messages;
+
+  // Successful message response.
+  ARDMessageResponse *messageResponse = [[ARDMessageResponse alloc] init];
+  messageResponse.result = kARDMessageResultTypeSuccess;
+
+  // Return join response from above on join.
+  [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
+    __unsafe_unretained void (^completionHandler)(ARDJoinResponse *response,
+                                                  NSError *error);
+    [invocation getArgument:&completionHandler atIndex:4];
+    completionHandler(joinResponse, nil);
+  }] joinRoomWithRoomId:roomId isLoopback:NO completionHandler:[OCMArg any]];
+
+  // Relay sent messages to the test's handler, then ack with success.
+  [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
+    __unsafe_unretained ARDSignalingMessage *message;
+    __unsafe_unretained void (^completionHandler)(ARDMessageResponse *response,
+                                                  NSError *error);
+    [invocation getArgument:&message atIndex:2];
+    [invocation getArgument:&completionHandler atIndex:5];
+    messageHandler(message);
+    completionHandler(messageResponse, nil);
+  }] sendMessage:[OCMArg any]
+       forRoomId:roomId
+        clientId:clientId
+    completionHandler:[OCMArg any]];
+
+  // Do nothing on leave.
+  [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
+    __unsafe_unretained void (^completionHandler)(NSError *error);
+    [invocation getArgument:&completionHandler atIndex:4];
+    if (completionHandler) {
+      completionHandler(nil);
+    }
+  }] leaveRoomWithRoomId:roomId
+                clientId:clientId
+       completionHandler:[OCMArg any]];
+
+  return mockRoomServerClient;
+}
+
+// Mock ARDSignalingChannel that forwards every outgoing message to
+// `messageHandler`.
+- (id)mockSignalingChannelForRoomId:(NSString *)roomId
+                           clientId:(NSString *)clientId
+                     messageHandler:
+                         (void (^)(ARDSignalingMessage *message))messageHandler {
+  id mockSignalingChannel =
+      [OCMockObject niceMockForProtocol:@protocol(ARDSignalingChannel)];
+  [[mockSignalingChannel stub] registerForRoomId:roomId clientId:clientId];
+  [[[mockSignalingChannel stub] andDo:^(NSInvocation *invocation) {
+    __unsafe_unretained ARDSignalingMessage *message;
+    [invocation getArgument:&message atIndex:2];
+    messageHandler(message);
+  }] sendMessage:[OCMArg any]];
+  return mockSignalingChannel;
+}
+
+// Mock TURN client that reports an empty server list.
+- (id)mockTURNClient {
+  id mockTURNClient =
+      [OCMockObject mockForProtocol:@protocol(ARDTURNClient)];
+  [[[mockTURNClient stub] andDo:^(NSInvocation *invocation) {
+    // Don't return anything in TURN response.
+    __unsafe_unretained void (^completionHandler)(NSArray *turnServers,
+                                                  NSError *error);
+    [invocation getArgument:&completionHandler atIndex:2];
+    completionHandler([NSArray array], nil);
+  }] requestServersWithCompletionHandler:[OCMArg any]];
+  return mockTURNClient;
+}
+
+// Settings model with a fixed set of capture resolutions so tests don't
+// depend on attached hardware.
+- (id)mockSettingsModel {
+  ARDSettingsModel *model = [[ARDSettingsModel alloc] init];
+  id partialMock = [OCMockObject partialMockForObject:model];
+  [[[partialMock stub] andReturn:@[ @"640x480", @"960x540", @"1280x720" ]]
+      availableVideoResolutions];
+
+  return model;
+}
+
+// Assembles an ARDAppClient wired entirely to mocks. `messageHandler` sees
+// each outgoing signaling message; `connectedHandler` fires on
+// RTCIceConnectionStateConnected; `localVideoTrackHandler` fires when the
+// local video track is delivered.
+- (ARDAppClient *)createAppClientForRoomId:(NSString *)roomId
+                                  clientId:(NSString *)clientId
+                               isInitiator:(BOOL)isInitiator
+                                  messages:(NSArray *)messages
+                            messageHandler:
+                                (void (^)(ARDSignalingMessage *message))messageHandler
+                          connectedHandler:(void (^)(void))connectedHandler
+                    localVideoTrackHandler:(void (^)(void))localVideoTrackHandler {
+  id turnClient = [self mockTURNClient];
+  id signalingChannel = [self mockSignalingChannelForRoomId:roomId
+                                                   clientId:clientId
+                                             messageHandler:messageHandler];
+  id roomServerClient =
+      [self mockRoomServerClientForRoomId:roomId
+                                 clientId:clientId
+                              isInitiator:isInitiator
+                                 messages:messages
+                           messageHandler:messageHandler];
+  id delegate =
+      [OCMockObject niceMockForProtocol:@protocol(ARDAppClientDelegate)];
+  [[[delegate stub] andDo:^(NSInvocation *invocation) {
+    connectedHandler();
+  }] appClient:[OCMArg any]
+      didChangeConnectionState:RTCIceConnectionStateConnected];
+  [[[delegate stub] andDo:^(NSInvocation *invocation) {
+    localVideoTrackHandler();
+  }] appClient:[OCMArg any]
+      didReceiveLocalVideoTrack:[OCMArg any]];
+
+  return [[ARDAppClient alloc] initWithRoomServerClient:roomServerClient
+                                       signalingChannel:signalingChannel
+                                             turnClient:turnClient
+                                               delegate:delegate];
+}
+
+#pragma mark - Cases
+
+// Tests that an ICE connection is established between two ARDAppClient objects
+// where one is set up as a caller and the other the answerer. Network
+// components are mocked out and messages are relayed directly from object to
+// object. It's expected that both clients reach the
+// RTCIceConnectionStateConnected state within a reasonable amount of time.
+- (void)testSession {
+  // Need block arguments here because we're setting up callbacks before we
+  // create the clients.
+  ARDAppClient *caller = nil;
+  ARDAppClient *answerer = nil;
+  __block __weak ARDAppClient *weakCaller = nil;
+  __block __weak ARDAppClient *weakAnswerer = nil;
+  NSString *roomId = @"testRoom";
+  NSString *callerId = @"testCallerId";
+  NSString *answererId = @"testAnswererId";
+
+  XCTestExpectation *callerConnectionExpectation =
+      [self expectationWithDescription:@"Caller PC connected"];
+  XCTestExpectation *answererConnectionExpectation =
+      [self expectationWithDescription:@"Answerer PC connected"];
+
+  caller = [self createAppClientForRoomId:roomId
+                                 clientId:callerId
+                              isInitiator:YES
+                                 messages:[NSArray array]
+                           messageHandler:^(ARDSignalingMessage *message) {
+    ARDAppClient *strongAnswerer = weakAnswerer;
+    [strongAnswerer channel:strongAnswerer.channel didReceiveMessage:message];
+  } connectedHandler:^{
+    [callerConnectionExpectation fulfill];
+  } localVideoTrackHandler:^{
+  }];
+  // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
+  // crash in Debug.
+  caller.defaultPeerConnectionConstraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+                                                         optionalConstraints:nil];
+  weakCaller = caller;
+
+  answerer = [self createAppClientForRoomId:roomId
+                                   clientId:answererId
+                                isInitiator:NO
+                                   messages:[NSArray array]
+                             messageHandler:^(ARDSignalingMessage *message) {
+    ARDAppClient *strongCaller = weakCaller;
+    [strongCaller channel:strongCaller.channel didReceiveMessage:message];
+  } connectedHandler:^{
+    [answererConnectionExpectation fulfill];
+  } localVideoTrackHandler:^{
+  }];
+  // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
+  // crash in Debug.
+  answerer.defaultPeerConnectionConstraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+                                                         optionalConstraints:nil];
+  weakAnswerer = answerer;
+
+  // Kick off connection.
+  [caller connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
+  [answerer connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
+  [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
+    if (error) {
+      XCTFail(@"Expectation failed with error %@.", error);
+    }
+  }];
+}
+
+// Test to see that we get a local video connection
+// Note this will currently pass even when no camera is connected as a local
+// video track is created regardless (Perhaps there should be a test for that...)
+#if !TARGET_IPHONE_SIMULATOR // Expect to fail on simulator due to no camera support
+- (void)testSessionShouldGetLocalVideoTrackCallback {
+  ARDAppClient *caller = nil;
+  NSString *roomId = @"testRoom";
+  NSString *callerId = @"testCallerId";
+
+  XCTestExpectation *localVideoTrackExpectation =
+      [self expectationWithDescription:@"Caller got local video."];
+
+  caller = [self createAppClientForRoomId:roomId
+                                 clientId:callerId
+                              isInitiator:YES
+                                 messages:[NSArray array]
+                           messageHandler:^(ARDSignalingMessage *message) {}
+                         connectedHandler:^{}
+                   localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }];
+  caller.defaultPeerConnectionConstraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+                                                         optionalConstraints:nil];
+
+  // Kick off connection.
+  [caller connectToRoomWithId:roomId
+                     settings:[self mockSettingsModel]
+                   isLoopback:NO];
+  [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
+    if (error) {
+      // Fixed: XCTFail's format argument must be an NSString literal (was a
+      // plain C string), matching the usage in -testSession above.
+      XCTFail(@"Expectation timed out with error: %@.", error);
+    }
+  }];
+}
+#endif
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm
new file mode 100644
index 0000000000..2e39834190
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#import "ARDFileCaptureController.h"
+
+#import "sdk/objc/components/capturer/RTCFileVideoCapturer.h"
+
+NS_CLASS_AVAILABLE_IOS(10)
+@interface ARDFileCaptureControllerTests : XCTestCase
+
+@property(nonatomic, strong) ARDFileCaptureController *fileCaptureController;
+@property(nonatomic, strong) id fileCapturerMock;
+
+@end
+
+@implementation ARDFileCaptureControllerTests
+
+@synthesize fileCaptureController = _fileCaptureController;
+@synthesize fileCapturerMock = _fileCapturerMock;
+
+// Fresh controller wrapping a class-mocked RTCFileVideoCapturer per test.
+- (void)setUp {
+  [super setUp];
+  self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]);
+  self.fileCaptureController =
+      [[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock];
+}
+
+- (void)tearDown {
+  self.fileCaptureController = nil;
+  [self.fileCapturerMock stopMocking];
+  self.fileCapturerMock = nil;
+  [super tearDown];
+}
+
+// Starting the controller must forward to the capturer's file-based start.
+- (void)testCaptureIsStarted {
+  [[self.fileCapturerMock expect] startCapturingFromFileNamed:[OCMArg any] onError:[OCMArg any]];
+
+  [self.fileCaptureController startCapture];
+
+  [self.fileCapturerMock verify];
+}
+
+// Stopping the controller must forward to the capturer. Renamed from the
+// misspelled `testCaptureIsStoped`; XCTest discovers tests by the "test"
+// prefix, so the rename is behavior-neutral.
+- (void)testCaptureIsStopped {
+  [[self.fileCapturerMock expect] stopCapture];
+
+  [self.fileCaptureController stopCapture];
+
+  [self.fileCapturerMock verify];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm
new file mode 100644
index 0000000000..dc62798963
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+
+#import "ARDSettingsModel+Private.h"
+#import "ARDSettingsStore.h"
+
+
+@interface ARDSettingsModelTests : XCTestCase {
+  ARDSettingsModel *_model;
+}
+@end
+
+@implementation ARDSettingsModelTests
+
+// Installs a mock ARDSettingsStore behind a partial mock of _model and pins
+// the available resolutions so tests are hardware-independent. Returns the
+// store mock for further stubbing/expectations.
+- (id)setupMockStore {
+  id storeMock = [OCMockObject mockForClass:[ARDSettingsStore class]];
+
+  id partialMock = [OCMockObject partialMockForObject:_model];
+  [[[partialMock stub] andReturn:storeMock] settingsStore];
+  [[[partialMock stub] andReturn:@[ @"640x480", @"960x540", @"1280x720" ]]
+      availableVideoResolutions];
+
+  return storeMock;
+}
+
+- (void)setUp {
+  // XCTest overrides must call through to super (was missing; the sibling
+  // suites do this).
+  [super setUp];
+  _model = [[ARDSettingsModel alloc] init];
+}
+
+- (void)testRetrievingSetting {
+  id storeMock = [self setupMockStore];
+  [[[storeMock expect] andReturn:@"640x480"] videoResolution];
+  NSString *string = [_model currentVideoResolutionSettingFromStore];
+
+  XCTAssertEqualObjects(string, @"640x480");
+}
+
+// A resolution not in availableVideoResolutions must be rejected.
+- (void)testStoringInvalidConstraintReturnsNo {
+  id storeMock = [self setupMockStore];
+  [([[storeMock stub] andReturn:@"960x480"])videoResolution];
+  XCTAssertFalse([_model storeVideoResolutionSetting:@"960x480"]);
+}
+
+- (void)testWidthConstraintFromStore {
+  id storeMock = [self setupMockStore];
+  [([[storeMock stub] andReturn:@"1270x480"])videoResolution];
+  int width = [_model currentVideoResolutionWidthFromStore];
+
+  XCTAssertEqual(width, 1270);
+}
+
+- (void)testHeightConstraintFromStore {
+  id storeMock = [self setupMockStore];
+  [([[storeMock stub] andReturn:@"960x540"])videoResolution];
+  int height = [_model currentVideoResolutionHeightFromStore];
+
+  XCTAssertEqual(height, 540);
+}
+
+// A malformed "WxH" string decodes to 0 for the width component.
+- (void)testConstraintComponentIsNilWhenInvalidConstraintString {
+  id storeMock = [self setupMockStore];
+  [([[storeMock stub] andReturn:@"invalid"])videoResolution];
+  int width = [_model currentVideoResolutionWidthFromStore];
+
+  XCTAssertEqual(width, 0);
+}
+
+- (void)testStoringAudioSetting {
+  id storeMock = [self setupMockStore];
+  [[storeMock expect] setAudioOnly:YES];
+
+  [_model storeAudioOnlySetting:YES];
+  [storeMock verify];
+}
+
+- (void)testReturningDefaultCallOption {
+  id storeMock = [self setupMockStore];
+  [[[storeMock stub] andReturnValue:@YES] useManualAudioConfig];
+
+  XCTAssertTrue([_model currentUseManualAudioConfigSettingFromStore]);
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm
new file mode 100644
index 0000000000..3625ffd7bf
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#include "test/ios/coverage_util_ios.h"
+
+// Test-bundle entry point: configures the coverage report path (a no-op
+// unless coverage is enabled at build time), then hands control to UIKit,
+// which hosts the XCTest bundle.
+int main(int argc, char* argv[]) {
+  rtc::test::ConfigureCoverageReportPath();
+
+  @autoreleasepool {
+    return UIApplicationMain(argc, argv, nil, nil);
+  }
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE
new file mode 100644
index 0000000000..c01a79c3bd
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE
@@ -0,0 +1,15 @@
+
+ Copyright 2012 Square Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h
new file mode 100644
index 0000000000..a230646073
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h
@@ -0,0 +1,135 @@
+//
+// Copyright 2012 Square Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#import <Foundation/Foundation.h>
+#import <Security/SecCertificate.h>
+
+// Connection lifecycle states, mirroring the JavaScript WebSocket API's
+// readyState values.
+typedef enum {
+  SR_CONNECTING = 0,
+  SR_OPEN = 1,
+  SR_CLOSING = 2,
+  SR_CLOSED = 3,
+} SRReadyState;
+
+// Close status codes; values follow RFC 6455 section 7.4.1.
+typedef enum SRStatusCode : NSInteger {
+  SRStatusCodeNormal = 1000,
+  SRStatusCodeGoingAway = 1001,
+  SRStatusCodeProtocolError = 1002,
+  SRStatusCodeUnhandledType = 1003,
+  // 1004 reserved.
+  SRStatusNoStatusReceived = 1005,
+  // 1004-1006 reserved.
+  SRStatusCodeInvalidUTF8 = 1007,
+  SRStatusCodePolicyViolated = 1008,
+  SRStatusCodeMessageTooBig = 1009,
+} SRStatusCode;
+
+@class SRWebSocket;
+
+extern NSString *const SRWebSocketErrorDomain;
+extern NSString *const SRHTTPResponseErrorKey;
+
+#pragma mark - SRWebSocketDelegate
+
+@protocol SRWebSocketDelegate;
+
+#pragma mark - SRWebSocket
+
+// A single-use WebSocket (RFC 6455) client connection. Create one instance
+// per connection, call -open exactly once, and observe events through the
+// delegate.
+@interface SRWebSocket : NSObject <NSStreamDelegate>
+
+// Receiver of open/message/close/error callbacks; held weakly.
+@property(nonatomic, weak) id<SRWebSocketDelegate> delegate;
+
+// Current connection lifecycle state.
+@property(nonatomic, readonly) SRReadyState readyState;
+@property(nonatomic, readonly, retain) NSURL *url;
+
+// This returns the negotiated protocol.
+// It will be nil until after the handshake completes.
+@property(nonatomic, readonly, copy) NSString *protocol;
+
+// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol.
+- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
+- (id)initWithURLRequest:(NSURLRequest *)request;
+
+// Some helper constructors.
+- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols;
+- (id)initWithURL:(NSURL *)url;
+
+// Delegate queue will be dispatch_main_queue by default.
+// You cannot set both OperationQueue and dispatch_queue.
+- (void)setDelegateOperationQueue:(NSOperationQueue *)queue;
+- (void)setDelegateDispatchQueue:(dispatch_queue_t)queue;
+
+// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes.
+- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+
+// SRWebSockets are intended for one-time-use only. Open should be called once and only once.
+- (void)open;
+
+// Close with SRStatusCodeNormal, or with an explicit code and reason.
+- (void)close;
+- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;
+
+// Send a UTF8 String or Data.
+- (void)send:(id)data;
+
+// Send Data (can be nil) in a ping message.
+- (void)sendPing:(NSData *)data;
+
+@end
+
+#pragma mark - SRWebSocketDelegate
+
+// Callbacks for WebSocket events. Delivered on the configured delegate queue
+// (the main queue by default — see -setDelegateOperationQueue: /
+// -setDelegateDispatchQueue: on SRWebSocket).
+@protocol SRWebSocketDelegate <NSObject>
+
+// message will either be an NSString if the server is using text
+// or NSData if the server is using binary.
+- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message;
+
+@optional
+
+- (void)webSocketDidOpen:(SRWebSocket *)webSocket;
+- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error;
+- (void)webSocket:(SRWebSocket *)webSocket
+  didCloseWithCode:(NSInteger)code
+            reason:(NSString *)reason
+          wasClean:(BOOL)wasClean;
+- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload;
+
+@end
+
+#pragma mark - NSURLRequest (CertificateAdditions)
+
+// Read-only access to the certificates pinned to this request.
+@interface NSURLRequest (CertificateAdditions)
+
+@property(nonatomic, retain, readonly) NSArray *SR_SSLPinnedCertificates;
+
+@end
+
+#pragma mark - NSMutableURLRequest (CertificateAdditions)
+
+// Set to enable certificate pinning for sockets created from this request.
+// NOTE(review): presumably an array of SecCertificateRef values (the header
+// imports <Security/SecCertificate.h>) — confirm against SRWebSocket.m.
+@interface NSMutableURLRequest (CertificateAdditions)
+
+@property(nonatomic, retain) NSArray *SR_SSLPinnedCertificates;
+
+@end
+
+#pragma mark - NSRunLoop (SRWebSocket)
+
+@interface NSRunLoop (SRWebSocket)
+
+// Shared run loop used for SRWebSocket network I/O (the default scheduling
+// target; see -scheduleInRunLoop:forMode:).
++ (NSRunLoop *)SR_networkRunLoop;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
new file mode 100644
index 0000000000..ab0d1b89bc
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
@@ -0,0 +1,1774 @@
+//
+// Copyright 2012 Square Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+
+#import "SRWebSocket.h"
+
+#if TARGET_OS_IPHONE
+#define HAS_ICU
+#endif
+
+#ifdef HAS_ICU
+#import <unicode/utf8.h>
+#endif
+
+#if TARGET_OS_IPHONE
+#import <Endian.h>
+#else
+#import <CoreServices/CoreServices.h>
+#endif
+
+#import <CommonCrypto/CommonDigest.h>
+#import <Security/SecRandom.h>
+
+#if OS_OBJECT_USE_OBJC_RETAIN_RELEASE
+#define sr_dispatch_retain(x)
+#define sr_dispatch_release(x)
+#define maybe_bridge(x) ((__bridge void *) x)
+#else
+#define sr_dispatch_retain(x) dispatch_retain(x)
+#define sr_dispatch_release(x) dispatch_release(x)
+#define maybe_bridge(x) (x)
+#endif
+
+#if !__has_feature(objc_arc)
+#error SocketRocket must be compiled with ARC enabled
+#endif
+
+
+// WebSocket frame opcodes (RFC 6455 section 5.2).
+typedef enum {
+  SROpCodeTextFrame = 0x1,
+  SROpCodeBinaryFrame = 0x2,
+  // 3-7 reserved.
+  SROpCodeConnectionClose = 0x8,
+  SROpCodePing = 0x9,
+  SROpCodePong = 0xA,
+  // B-F reserved.
+} SROpCode;
+
+// Decoded WebSocket frame header; filled in by -_readFrameContinue.
+typedef struct {
+  BOOL fin;           // FIN bit: final fragment of the message.
+// BOOL rsv1;
+// BOOL rsv2;
+// BOOL rsv3;
+  uint8_t opcode;     // SROpCode value (continuation resolved to real opcode).
+  BOOL masked;        // MASK bit; servers must not set this.
+  uint64_t payload_length;  // Payload size after extended-length decoding.
+} frame_header;
+
+// GUID appended to Sec-WebSocket-Key before SHA-1/base64 when computing the
+// expected Sec-WebSocket-Accept value (see -_checkHandshake:).
+static NSString *const SRWebSocketAppendToSecKeyString = @"258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
+
+// Forward declarations; definitions live later in this file.
+static inline int32_t validate_dispatch_data_partial_string(NSData *data);
+static inline void SRFastLog(NSString *format, ...);
+
+// SHA-1 + base64 helper on raw data, used for the handshake accept check.
+@interface NSData (SRWebSocket)
+
+- (NSString *)stringBySHA1ThenBase64Encoding;
+
+@end
+
+
+// Same SHA-1 + base64 helper, applied to a string's UTF-8 bytes.
+@interface NSString (SRWebSocket)
+
+- (NSString *)stringBySHA1ThenBase64Encoding;
+
+@end
+
+
+@interface NSURL (SRWebSocket)
+
+// The origin isn't really applicable for a native application.
+// So instead, just map ws -> http and wss -> https.
+- (NSString *)SR_origin;
+
+@end
+
+
+// Dedicated thread that owns the shared network run loop
+// (+[NSRunLoop SR_networkRunLoop]).
+@interface _SRRunLoopThread : NSThread
+
+@property (nonatomic, readonly) NSRunLoop *runLoop;
+
+@end
+
+
+// Returns base64(SHA1(bytes[0..length))) as an NSString.
+// Uses -base64EncodedStringWithOptions: when available, otherwise the older
+// -base64Encoding, to support older SDKs.
+static NSString *newSHA1String(const char *bytes, size_t length) {
+  uint8_t md[CC_SHA1_DIGEST_LENGTH];
+
+  assert(length >= 0);
+  assert(length <= UINT32_MAX);
+  CC_SHA1(bytes, (CC_LONG)length, md);
+
+  NSData *data = [NSData dataWithBytes:md length:CC_SHA1_DIGEST_LENGTH];
+
+  if ([data respondsToSelector:@selector(base64EncodedStringWithOptions:)]) {
+    return [data base64EncodedStringWithOptions:0];
+  }
+
+  return [data base64Encoding];
+}
+
+@implementation NSData (SRWebSocket)
+
+// base64(SHA1(self.bytes)) of the receiver's raw bytes.
+- (NSString *)stringBySHA1ThenBase64Encoding;
+{
+  return newSHA1String(self.bytes, self.length);
+}
+
+@end
+
+
+@implementation NSString (SRWebSocket)
+
+// base64(SHA1(...)) of the receiver's UTF-8 representation.
+// NOTE(review): this passes self.length (UTF-16 code-unit count) as the byte
+// length of self.UTF8String; the two differ for non-ASCII strings — the
+// handshake key this is used on is always ASCII base64, so it holds there.
+- (NSString *)stringBySHA1ThenBase64Encoding;
+{
+  return newSHA1String(self.UTF8String, self.length);
+}
+
+@end
+
+// Error domain and userInfo key surfaced in -_failWithError: NSErrors.
+NSString *const SRWebSocketErrorDomain = @"SRWebSocketErrorDomain";
+NSString *const SRHTTPResponseErrorKey = @"HTTPResponseStatusCode";
+
+// Returns number of bytes consumed. Returning 0 means you didn't match.
+// Sends bytes to callback handler;
+typedef size_t (^stream_scanner)(NSData *collected_data);
+
+// Invoked with the bytes a consumer matched/collected.
+typedef void (^data_callback)(SRWebSocket *webSocket, NSData *data);
+
+// One pending read request against the input buffer: either "scan until the
+// scanner matches" or "deliver exactly bytesNeeded bytes", optionally
+// unmasking and/or appending into the current frame.
+@interface SRIOConsumer : NSObject {
+  stream_scanner _scanner;
+  data_callback _handler;
+  size_t _bytesNeeded;
+  BOOL _readToCurrentFrame;
+  BOOL _unmaskBytes;
+}
+@property (nonatomic, copy, readonly) stream_scanner consumer;
+@property (nonatomic, copy, readonly) data_callback handler;
+@property (nonatomic, assign) size_t bytesNeeded;
+@property (nonatomic, assign, readonly) BOOL readToCurrentFrame;
+@property (nonatomic, assign, readonly) BOOL unmaskBytes;
+
+@end
+
+// This class is not thread-safe, and is expected to always be run on the same queue.
+// Recycles SRIOConsumer objects to avoid reallocating one per read.
+@interface SRIOConsumerPool : NSObject
+
+- (id)initWithBufferCapacity:(NSUInteger)poolSize;
+
+- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+- (void)returnConsumer:(SRIOConsumer *)consumer;
+
+@end
+
+// Private interface; SRWebSocket is the NSStreamDelegate for its own
+// input/output streams.
+@interface SRWebSocket () <NSStreamDelegate>
+
+- (void)_writeData:(NSData *)data;
+- (void)_closeWithProtocolError:(NSString *)message;
+- (void)_failWithError:(NSError *)error;
+
+- (void)_disconnect;
+
+- (void)_readFrameNew;
+- (void)_readFrameContinue;
+
+- (void)_pumpScanner;
+
+- (void)_pumpWriting;
+
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback;
+- (void)_addConsumerWithDataLength:(size_t)dataLength callback:(data_callback)callback readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback dataLength:(size_t)dataLength;
+- (void)_readUntilBytes:(const void *)bytes length:(size_t)length callback:(data_callback)dataHandler;
+- (void)_readUntilHeaderCompleteWithCallback:(data_callback)dataHandler;
+
+- (void)_sendFrameWithOpcode:(SROpCode)opcode data:(id)data;
+
+- (BOOL)_checkHandshake:(CFHTTPMessageRef)httpMessage;
+- (void)_SR_commonInit;
+
+- (void)_initializeStreams;
+- (void)_connect;
+
+// readyState is redeclared readwrite internally.
+@property (nonatomic) SRReadyState readyState;
+
+@property (nonatomic) NSOperationQueue *delegateOperationQueue;
+@property (nonatomic) dispatch_queue_t delegateDispatchQueue;
+
+@end
+
+
+@implementation SRWebSocket {
+  NSInteger _webSocketVersion;      // Sec-WebSocket-Version sent (13).
+
+  NSOperationQueue *_delegateOperationQueue;
+  dispatch_queue_t _delegateDispatchQueue;
+
+  // Serial queue on which all socket state is mutated (see assertOnWorkQueue).
+  dispatch_queue_t _workQueue;
+  NSMutableArray *_consumers;       // Pending SRIOConsumer reads, FIFO.
+
+  NSInputStream *_inputStream;
+  NSOutputStream *_outputStream;
+
+  // Inbound bytes plus how far into them we've already consumed.
+  NSMutableData *_readBuffer;
+  NSUInteger _readBufferOffset;
+
+  // Outbound bytes plus how much has been written to the stream so far.
+  NSMutableData *_outputBuffer;
+  NSUInteger _outputBufferOffset;
+
+  // State of the fragmented message currently being assembled.
+  uint8_t _currentFrameOpcode;
+  size_t _currentFrameCount;
+  size_t _readOpCount;
+  uint32_t _currentStringScanPosition;
+  NSMutableData *_currentFrameData;
+
+  NSString *_closeReason;
+
+  NSString *_secKey;                // Random Sec-WebSocket-Key for this handshake.
+
+  BOOL _pinnedCertFound;
+
+  // Mask key of the frame currently being unmasked (RFC 6455 masking).
+  uint8_t _currentReadMaskKey[4];
+  size_t _currentReadMaskOffset;
+
+  BOOL _consumerStopped;
+
+  BOOL _closeWhenFinishedWriting;
+  BOOL _failed;
+
+  BOOL _secure;                     // YES for wss/https URLs.
+  NSURLRequest *_urlRequest;
+
+  CFHTTPMessageRef _receivedHTTPHeaders;  // Handshake response being parsed.
+
+  BOOL _sentClose;
+  BOOL _didFail;
+  int _closeCode;
+
+  BOOL _isPumping;
+
+  NSMutableSet *_scheduledRunloops;  // @[runLoop, mode] pairs we're scheduled on.
+
+  // We use this to retain ourselves.
+  __strong SRWebSocket *_selfRetain;
+
+  NSArray *_requestedProtocols;
+  SRIOConsumerPool *_consumerPool;
+}
+
+@synthesize delegate = _delegate;
+@synthesize url = _url;
+@synthesize readyState = _readyState;
+@synthesize protocol = _protocol;
+
+// Shared "\r\n\r\n" token; initialized once in +initialize.
+static __strong NSData *CRLFCRLF;
+
++ (void)initialize;
+{
+  CRLFCRLF = [[NSData alloc] initWithBytes:"\r\n\r\n" length:4];
+}
+
+// Designated initializer: captures the request, its URL and the requested
+// subprotocols, then runs common setup. Does not open the connection.
+- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
+{
+  self = [super init];
+  if (self) {
+    assert(request.URL);
+    _url = request.URL;
+    _urlRequest = request;
+
+    _requestedProtocols = [protocols copy];
+
+    [self _SR_commonInit];
+  }
+
+  return self;
+}
+
+// Convenience: no subprotocols requested.
+- (id)initWithURLRequest:(NSURLRequest *)request;
+{
+  return [self initWithURLRequest:request protocols:nil];
+}
+
+// Convenience: URL only, no subprotocols.
+- (id)initWithURL:(NSURL *)url;
+{
+  return [self initWithURL:url protocols:nil];
+}
+
+// Convenience: wraps the URL in a request and forwards to the designated
+// initializer.
+- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols;
+{
+  NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:url];
+  return [self initWithURLRequest:request protocols:protocols];
+}
+
+// Shared init: validates the URL scheme, creates the work queue, buffers,
+// consumer pool and streams. Delegate callbacks default to the main queue.
+- (void)_SR_commonInit;
+{
+
+  NSString *scheme = _url.scheme.lowercaseString;
+  assert([scheme isEqualToString:@"ws"] || [scheme isEqualToString:@"http"] || [scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]);
+
+  if ([scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]) {
+    _secure = YES;
+  }
+
+  _readyState = SR_CONNECTING;
+  _consumerStopped = YES;
+  _webSocketVersion = 13;
+
+  _workQueue = dispatch_queue_create(NULL, DISPATCH_QUEUE_SERIAL);
+
+  // Going to set a specific on the queue so we can validate we're on the work queue
+  dispatch_queue_set_specific(_workQueue, (__bridge void *)self, maybe_bridge(_workQueue), NULL);
+
+  _delegateDispatchQueue = dispatch_get_main_queue();
+  sr_dispatch_retain(_delegateDispatchQueue);
+
+  _readBuffer = [[NSMutableData alloc] init];
+  _outputBuffer = [[NSMutableData alloc] init];
+
+  _currentFrameData = [[NSMutableData alloc] init];
+
+  _consumers = [[NSMutableArray alloc] init];
+
+  _consumerPool = [[SRIOConsumerPool alloc] init];
+
+  _scheduledRunloops = [[NSMutableSet alloc] init];
+
+  [self _initializeStreams];
+
+  // default handlers
+}
+
+// Debug guard: asserts the caller is executing on _workQueue, using the
+// queue-specific value set in -_SR_commonInit.
+- (void)assertOnWorkQueue;
+{
+  assert(dispatch_get_specific((__bridge void *)self) == maybe_bridge(_workQueue));
+}
+
+// Tears down streams and releases non-ARC resources (dispatch queues on
+// pre-OS_OBJECT_USE_OBJC builds, the CFHTTPMessage header buffer).
+- (void)dealloc
+{
+  _inputStream.delegate = nil;
+  _outputStream.delegate = nil;
+
+  [_inputStream close];
+  [_outputStream close];
+
+  sr_dispatch_release(_workQueue);
+  _workQueue = NULL;
+
+  if (_receivedHTTPHeaders) {
+    CFRelease(_receivedHTTPHeaders);
+    _receivedHTTPHeaders = NULL;
+  }
+
+  if (_delegateDispatchQueue) {
+    sr_dispatch_release(_delegateDispatchQueue);
+    _delegateDispatchQueue = NULL;
+  }
+}
+
+#ifndef NDEBUG
+
+// Debug-only setter: KVO-compliant and asserts readyState only moves forward
+// (CONNECTING -> OPEN -> CLOSING -> CLOSED).
+- (void)setReadyState:(SRReadyState)aReadyState;
+{
+  [self willChangeValueForKey:@"readyState"];
+  assert(aReadyState > _readyState);
+  _readyState = aReadyState;
+  [self didChangeValueForKey:@"readyState"];
+}
+
+#endif
+
+// Starts the connection. May only be called once per instance; the socket
+// retains itself (_selfRetain) until it closes or fails.
+- (void)open;
+{
+  assert(_url);
+  NSAssert(_readyState == SR_CONNECTING, @"Cannot call -(void)open on SRWebSocket more than once");
+
+  _selfRetain = self;
+
+  [self _connect];
+}
+
+// Calls block on delegate queue
+// (operation queue takes precedence over the dispatch queue when both exist).
+- (void)_performDelegateBlock:(dispatch_block_t)block;
+{
+  if (_delegateOperationQueue) {
+    [_delegateOperationQueue addOperationWithBlock:block];
+  } else {
+    assert(_delegateDispatchQueue);
+    dispatch_async(_delegateDispatchQueue, block);
+  }
+}
+
+// Swaps the queue delegate callbacks are dispatched on, balancing the manual
+// retain/release needed on non-ARC-dispatch builds.
+- (void)setDelegateDispatchQueue:(dispatch_queue_t)queue;
+{
+  if (queue) {
+    sr_dispatch_retain(queue);
+  }
+
+  if (_delegateDispatchQueue) {
+    sr_dispatch_release(_delegateDispatchQueue);
+  }
+
+  _delegateDispatchQueue = queue;
+}
+
+// Validates the server's Sec-WebSocket-Accept header: it must equal
+// base64(SHA1(_secKey + RFC 6455 GUID)).
+- (BOOL)_checkHandshake:(CFHTTPMessageRef)httpMessage;
+{
+  NSString *acceptHeader = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(httpMessage, CFSTR("Sec-WebSocket-Accept")));
+
+  if (acceptHeader == nil) {
+    return NO;
+  }
+
+  NSString *concattedString = [_secKey stringByAppendingString:SRWebSocketAppendToSecKeyString];
+  NSString *expectedAccept = [concattedString stringBySHA1ThenBase64Encoding];
+
+  return [acceptHeader isEqualToString:expectedAccept];
+}
+
+// Completes the opening handshake once all response headers have arrived:
+// rejects >= 400 status codes, verifies Sec-WebSocket-Accept, verifies any
+// negotiated subprotocol was actually requested, then transitions to SR_OPEN,
+// starts frame reading, and notifies the delegate.
+- (void)_HTTPHeadersDidFinish;
+{
+  NSInteger responseCode = CFHTTPMessageGetResponseStatusCode(_receivedHTTPHeaders);
+
+  if (responseCode >= 400) {
+    SRFastLog(@"Request failed with response code %d", responseCode);
+    [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2132 userInfo:@{NSLocalizedDescriptionKey:[NSString stringWithFormat:@"received bad response code from server %ld", (long)responseCode], SRHTTPResponseErrorKey:@(responseCode)}]];
+    return;
+  }
+
+  if(![self _checkHandshake:_receivedHTTPHeaders]) {
+    [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Invalid Sec-WebSocket-Accept response"] forKey:NSLocalizedDescriptionKey]]];
+    return;
+  }
+
+  NSString *negotiatedProtocol = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_receivedHTTPHeaders, CFSTR("Sec-WebSocket-Protocol")));
+  if (negotiatedProtocol) {
+    // Make sure we requested the protocol
+    if ([_requestedProtocols indexOfObject:negotiatedProtocol] == NSNotFound) {
+      [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Server specified Sec-WebSocket-Protocol that wasn't requested"] forKey:NSLocalizedDescriptionKey]]];
+      return;
+    }
+
+    _protocol = negotiatedProtocol;
+  }
+
+  self.readyState = SR_OPEN;
+
+  if (!_didFail) {
+    [self _readFrameNew];
+  }
+
+  [self _performDelegateBlock:^{
+    if ([self.delegate respondsToSelector:@selector(webSocketDidOpen:)]) {
+      [self.delegate webSocketDidOpen:self];
+    };
+  }];
+}
+
+
+// Accumulates handshake-response bytes (up to each CRLFCRLF boundary) into
+// _receivedHTTPHeaders, recursing until CFHTTPMessage reports the header set
+// is complete, then hands off to -_HTTPHeadersDidFinish.
+- (void)_readHTTPHeader;
+{
+  if (_receivedHTTPHeaders == NULL) {
+    _receivedHTTPHeaders = CFHTTPMessageCreateEmpty(NULL, NO);
+  }
+
+  [self _readUntilHeaderCompleteWithCallback:^(SRWebSocket *self, NSData *data) {
+    CFHTTPMessageAppendBytes(self->_receivedHTTPHeaders, (const UInt8 *)data.bytes, data.length);
+
+    if (CFHTTPMessageIsHeaderComplete(self->_receivedHTTPHeaders)) {
+      SRFastLog(@"Finished reading headers %@",
+                CFBridgingRelease(CFHTTPMessageCopyAllHeaderFields(self->_receivedHTTPHeaders)));
+      [self _HTTPHeadersDidFinish];
+    } else {
+      [self _readHTTPHeader];
+    }
+  }];
+}
+
+// Streams are open: builds and sends the HTTP Upgrade request (Host, Upgrade,
+// Connection, a fresh random 16-byte Sec-WebSocket-Key, Version, Origin,
+// requested subprotocols, plus any caller-supplied headers), then starts
+// reading the response headers.
+- (void)didConnect
+{
+  SRFastLog(@"Connected");
+  CFHTTPMessageRef request = CFHTTPMessageCreateRequest(NULL, CFSTR("GET"), (__bridge CFURLRef)_url, kCFHTTPVersion1_1);
+
+  // Set host first so it defaults
+  CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Host"), (__bridge CFStringRef)(_url.port ? [NSString stringWithFormat:@"%@:%@", _url.host, _url.port] : _url.host));
+
+  NSMutableData *keyBytes = [[NSMutableData alloc] initWithLength:16];
+  BOOL success = !SecRandomCopyBytes(kSecRandomDefault, keyBytes.length, keyBytes.mutableBytes);
+  assert(success);
+
+  if ([keyBytes respondsToSelector:@selector(base64EncodedStringWithOptions:)]) {
+    _secKey = [keyBytes base64EncodedStringWithOptions:0];
+  } else {
+    _secKey = [keyBytes base64Encoding];
+  }
+
+  // 16 random bytes always base64-encode to exactly 24 characters.
+  assert([_secKey length] == 24);
+
+  CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Upgrade"), CFSTR("websocket"));
+  CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Connection"), CFSTR("Upgrade"));
+  CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Key"), (__bridge CFStringRef)_secKey);
+  CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Version"), (__bridge CFStringRef)[NSString stringWithFormat:@"%ld", (long)_webSocketVersion]);
+
+  CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Origin"), (__bridge CFStringRef)_url.SR_origin);
+
+  if (_requestedProtocols) {
+    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Protocol"), (__bridge CFStringRef)[_requestedProtocols componentsJoinedByString:@", "]);
+  }
+
+  [_urlRequest.allHTTPHeaderFields enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop) {
+    CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)key, (__bridge CFStringRef)obj);
+  }];
+
+  NSData *message = CFBridgingRelease(CFHTTPMessageCopySerializedMessage(request));
+
+  CFRelease(request);
+
+  [self _writeData:message];
+  [self _readHTTPHeader];
+}
+
+// Creates the paired CFStream socket streams to host:port (defaulting to
+// 80/443 by scheme) and configures TLS on secure connections. Certificate
+// chain validation is disabled when pinned certs are supplied — pinning is
+// checked elsewhere — and also unconditionally in DEBUG builds.
+- (void)_initializeStreams;
+{
+  assert(_url.port.unsignedIntValue <= UINT32_MAX);
+  uint32_t port = _url.port.unsignedIntValue;
+  if (port == 0) {
+    if (!_secure) {
+      port = 80;
+    } else {
+      port = 443;
+    }
+  }
+  NSString *host = _url.host;
+
+  CFReadStreamRef readStream = NULL;
+  CFWriteStreamRef writeStream = NULL;
+
+  CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)host, port, &readStream, &writeStream);
+
+  _outputStream = CFBridgingRelease(writeStream);
+  _inputStream = CFBridgingRelease(readStream);
+
+
+  if (_secure) {
+    NSMutableDictionary *SSLOptions = [[NSMutableDictionary alloc] init];
+
+    [_outputStream setProperty:(__bridge id)kCFStreamSocketSecurityLevelNegotiatedSSL forKey:(__bridge id)kCFStreamPropertySocketSecurityLevel];
+
+    // If we're using pinned certs, don't validate the certificate chain
+    if ([_urlRequest SR_SSLPinnedCertificates].count) {
+      [SSLOptions setValue:[NSNumber numberWithBool:NO] forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain];
+    }
+
+#ifdef DEBUG
+    [SSLOptions setValue:[NSNumber numberWithBool:NO] forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain];
+    NSLog(@"SocketRocket: In debug mode. Allowing connection to any root cert");
+#endif
+
+    [_outputStream setProperty:SSLOptions
+                        forKey:(__bridge id)kCFStreamPropertySSLSettings];
+  }
+
+  _inputStream.delegate = self;
+  _outputStream.delegate = self;
+}
+
+// Opens both streams, first scheduling them on the shared network run loop if
+// the caller hasn't scheduled the socket anywhere yet.
+- (void)_connect;
+{
+  if (!_scheduledRunloops.count) {
+    [self scheduleInRunLoop:[NSRunLoop SR_networkRunLoop] forMode:NSDefaultRunLoopMode];
+  }
+
+
+  [_outputStream open];
+  [_inputStream open];
+}
+
+// Schedules both streams on the given run loop/mode and records the pair so
+// it can be unscheduled on close.
+- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+{
+  [_outputStream scheduleInRunLoop:aRunLoop forMode:mode];
+  [_inputStream scheduleInRunLoop:aRunLoop forMode:mode];
+
+  [_scheduledRunloops addObject:@[aRunLoop, mode]];
+}
+
+// Inverse of -scheduleInRunLoop:forMode:: removes both streams and drops the
+// recorded pair.
+- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+{
+  [_outputStream removeFromRunLoop:aRunLoop forMode:mode];
+  [_inputStream removeFromRunLoop:aRunLoop forMode:mode];
+
+  [_scheduledRunloops removeObject:@[aRunLoop, mode]];
+}
+
+// Normal close (status 1000) with no reason string.
+- (void)close;
+{
+  [self closeWithCode:SRStatusCodeNormal reason:nil];
+}
+
+// Initiates the closing handshake on the work queue: no-op if already
+// closing/closed; a socket still connecting is torn down directly; otherwise
+// sends a close frame whose payload is the big-endian 16-bit code followed by
+// the UTF-8 encoded reason.
+- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;
+{
+  assert(code);
+  dispatch_async(_workQueue, ^{
+    if (self.readyState == SR_CLOSING || self.readyState == SR_CLOSED) {
+      return;
+    }
+
+    BOOL wasConnecting = self.readyState == SR_CONNECTING;
+
+    self.readyState = SR_CLOSING;
+
+    SRFastLog(@"Closing with code %d reason %@", code, reason);
+
+    if (wasConnecting) {
+      [self _disconnect];
+      return;
+    }
+
+    size_t maxMsgSize = [reason maximumLengthOfBytesUsingEncoding:NSUTF8StringEncoding];
+    NSMutableData *mutablePayload = [[NSMutableData alloc] initWithLength:sizeof(uint16_t) + maxMsgSize];
+    NSData *payload = mutablePayload;
+
+    ((uint16_t *)mutablePayload.mutableBytes)[0] = EndianU16_BtoN(code);
+
+    if (reason) {
+      NSRange remainingRange = {0};
+
+      NSUInteger usedLength = 0;
+
+      BOOL success = [reason getBytes:(char *)mutablePayload.mutableBytes + sizeof(uint16_t) maxLength:payload.length - sizeof(uint16_t) usedLength:&usedLength encoding:NSUTF8StringEncoding options:NSStringEncodingConversionExternalRepresentation range:NSMakeRange(0, reason.length) remainingRange:&remainingRange];
+
+      assert(success);
+      assert(remainingRange.length == 0);
+
+      // Trim the buffer when the reason encoded shorter than its worst case.
+      if (usedLength != maxMsgSize) {
+        payload = [payload subdataWithRange:NSMakeRange(0, usedLength + sizeof(uint16_t))];
+      }
+    }
+
+
+    [self _sendFrameWithOpcode:SROpCodeConnectionClose data:payload];
+  });
+}
+
+// Closes with status 1002 (protocol error) and then disconnects. Routed via
+// the delegate queue first so already-delivered messages stay ordered.
+- (void)_closeWithProtocolError:(NSString *)message;
+{
+  // Need to shunt this on the _callbackQueue first to see if they received any messages
+  [self _performDelegateBlock:^{
+    [self closeWithCode:SRStatusCodeProtocolError reason:message];
+    dispatch_async(self->_workQueue, ^{
+      [self _disconnect];
+    });
+  }];
+}
+
+// Fails the connection: notifies the delegate (if not already closed), marks
+// the socket CLOSED, releases the self-retain taken in -open, and disconnects.
+- (void)_failWithError:(NSError *)error;
+{
+  dispatch_async(_workQueue, ^{
+    if (self.readyState != SR_CLOSED) {
+      self->_failed = YES;
+      [self _performDelegateBlock:^{
+        if ([self.delegate respondsToSelector:@selector(webSocket:didFailWithError:)]) {
+          [self.delegate webSocket:self didFailWithError:error];
+        }
+      }];
+
+      self.readyState = SR_CLOSED;
+      self->_selfRetain = nil;
+
+      SRFastLog(@"Failing with error %@", error.localizedDescription);
+
+      [self _disconnect];
+    }
+  });
+}
+
+// Appends data to the output buffer and pumps the writer. Dropped silently
+// once a close has been queued. Work-queue only.
+- (void)_writeData:(NSData *)data;
+{
+  [self assertOnWorkQueue];
+
+  if (_closeWhenFinishedWriting) {
+    return;
+  }
+  [_outputBuffer appendData:data];
+  [self _pumpWriting];
+}
+
+// Sends an NSString as a text frame or NSData as a binary frame (nil becomes
+// an empty text frame); any other type asserts. The payload is copied before
+// hopping to the work queue.
+- (void)send:(id)data;
+{
+  NSAssert(self.readyState != SR_CONNECTING, @"Invalid State: Cannot call send: until connection is open");
+  // TODO: maybe not copy this for performance
+  data = [data copy];
+  dispatch_async(_workQueue, ^{
+    if ([data isKindOfClass:[NSString class]]) {
+      [self _sendFrameWithOpcode:SROpCodeTextFrame data:[(NSString *)data dataUsingEncoding:NSUTF8StringEncoding]];
+    } else if ([data isKindOfClass:[NSData class]]) {
+      [self _sendFrameWithOpcode:SROpCodeBinaryFrame data:data];
+    } else if (data == nil) {
+      [self _sendFrameWithOpcode:SROpCodeTextFrame data:data];
+    } else {
+      assert(NO);
+    }
+  });
+}
+
+// Sends a ping frame; a nil payload is normalized to empty data.
+- (void)sendPing:(NSData *)data;
+{
+  NSAssert(self.readyState == SR_OPEN, @"Invalid State: Cannot call send: until connection is open");
+  // TODO: maybe not copy this for performance
+  data = [data copy] ?: [NSData data]; // It's okay for a ping to be empty
+  dispatch_async(_workQueue, ^{
+    [self _sendFrameWithOpcode:SROpCodePing data:data];
+  });
+}
+
+// Replies to an inbound ping with a pong carrying the same payload.
+- (void)handlePing:(NSData *)pingData;
+{
+  // Need to pingpong this off _callbackQueue first to make sure messages happen in order
+  [self _performDelegateBlock:^{
+    dispatch_async(self->_workQueue, ^{
+      [self _sendFrameWithOpcode:SROpCodePong data:pingData];
+    });
+  }];
+}
+
+// Forwards an inbound pong's payload to the (optional) delegate callback.
+- (void)handlePong:(NSData *)pongData;
+{
+  SRFastLog(@"Received pong");
+  [self _performDelegateBlock:^{
+    if ([self.delegate respondsToSelector:@selector(webSocket:didReceivePong:)]) {
+      [self.delegate webSocket:self didReceivePong:pongData];
+    }
+  }];
+}
+
+// Delivers a completed text (NSString) or binary (NSData) message to the
+// required delegate callback on the delegate queue.
+- (void)_handleMessage:(id)message
+{
+  SRFastLog(@"Received message");
+  [self _performDelegateBlock:^{
+    [self.delegate webSocket:self didReceiveMessage:message];
+  }];
+}
+
+
+// Validates a received close status code per RFC 6455 section 7.4:
+// 1000-1011 are valid except the reserved 1004/1005/1006; 3000-4999 are the
+// registered/private ranges; everything else is a protocol error.
+static inline BOOL closeCodeIsValid(int closeCode) {
+  if (closeCode < 1000) {
+    return NO;
+  }
+
+  if (closeCode >= 1000 && closeCode <= 1011) {
+    if (closeCode == 1004 ||
+        closeCode == 1005 ||
+        closeCode == 1006) {
+      return NO;
+    }
+    return YES;
+  }
+
+  if (closeCode >= 3000 && closeCode <= 3999) {
+    return YES;
+  }
+
+  if (closeCode >= 4000 && closeCode <= 4999) {
+    return YES;
+  }
+
+  return NO;
+}
+
+// Note from RFC:
+//
+//  If there is a body, the first two
+//  bytes of the body MUST be a 2-byte unsigned integer (in network byte
+//  order) representing a status code with value /code/ defined in
+//  Section 7.4. Following the 2-byte integer the body MAY contain UTF-8
+//  encoded data with value /reason/, the interpretation of which is not
+//  defined by this specification.
+
+// Handles an inbound close frame: parses the status code and optional UTF-8
+// reason, replies with a close of our own if still open, then disconnects.
+// A 1-byte payload, an invalid code, or a non-UTF-8 reason is a protocol error.
+- (void)handleCloseWithData:(NSData *)data;
+{
+  size_t dataSize = data.length;
+  __block uint16_t closeCode = 0;
+
+  SRFastLog(@"Received close frame");
+
+  if (dataSize == 1) {
+    // TODO handle error
+    [self _closeWithProtocolError:@"Payload for close must be larger than 2 bytes"];
+    return;
+  } else if (dataSize >= 2) {
+    [data getBytes:&closeCode length:sizeof(closeCode)];
+    _closeCode = EndianU16_BtoN(closeCode);
+    if (!closeCodeIsValid(_closeCode)) {
+      [self _closeWithProtocolError:[NSString stringWithFormat:@"Cannot have close code of %d", _closeCode]];
+      return;
+    }
+    if (dataSize > 2) {
+      _closeReason = [[NSString alloc] initWithData:[data subdataWithRange:NSMakeRange(2, dataSize - 2)] encoding:NSUTF8StringEncoding];
+      if (!_closeReason) {
+        [self _closeWithProtocolError:@"Close reason MUST be valid UTF-8"];
+        return;
+      }
+    }
+  } else {
+    _closeCode = SRStatusNoStatusReceived;
+  }
+
+  [self assertOnWorkQueue];
+
+  if (self.readyState == SR_OPEN) {
+    [self closeWithCode:1000 reason:nil];
+  }
+  dispatch_async(_workQueue, ^{
+    [self _disconnect];
+  });
+}
+
+// Flags the socket to close once pending output drains, then pumps the
+// writer (which performs the actual stream teardown). Work-queue only.
+- (void)_disconnect;
+{
+  [self assertOnWorkQueue];
+  SRFastLog(@"Trying to disconnect");
+  _closeWhenFinishedWriting = YES;
+  [self _pumpWriting];
+}
+
+// Dispatches a fully-assembled frame by opcode: text frames are decoded as
+// UTF-8 (closing with 1007 on invalid data), binary frames are copied and
+// delivered, and control frames go to their specific handlers. Also kicks off
+// reading the next frame (a fresh message for data frames, a continuation
+// read for control frames that may interleave a fragmented message).
+- (void)_handleFrameWithData:(NSData *)frameData opCode:(NSInteger)opcode;
+{
+  // Check that the current data is valid UTF8
+
+  BOOL isControlFrame = (opcode == SROpCodePing || opcode == SROpCodePong || opcode == SROpCodeConnectionClose);
+  if (!isControlFrame) {
+    [self _readFrameNew];
+  } else {
+    dispatch_async(_workQueue, ^{
+      [self _readFrameContinue];
+    });
+  }
+
+  switch (opcode) {
+    case SROpCodeTextFrame: {
+      NSString *str = [[NSString alloc] initWithData:frameData encoding:NSUTF8StringEncoding];
+      if (str == nil && frameData) {
+        [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"];
+        dispatch_async(_workQueue, ^{
+          [self _disconnect];
+        });
+
+        return;
+      }
+      [self _handleMessage:str];
+      break;
+    }
+    case SROpCodeBinaryFrame:
+      [self _handleMessage:[frameData copy]];
+      break;
+    case SROpCodeConnectionClose:
+      [self handleCloseWithData:frameData];
+      break;
+    case SROpCodePing:
+      [self handlePing:frameData];
+      break;
+    case SROpCodePong:
+      [self handlePong:frameData];
+      break;
+    default:
+      [self _closeWithProtocolError:[NSString stringWithFormat:@"Unknown opcode %ld", (long)opcode]];
+      // TODO: Handle invalid opcode
+      break;
+  }
+}
+
+// Acts on a decoded frame header: enforces the control-frame rules (no
+// fragmentation, payload < 126 bytes), tracks the fragmented-message state
+// for data frames, and either handles a zero-length payload immediately or
+// queues a consumer to read exactly payload_length bytes before dispatching.
+- (void)_handleFrameHeader:(frame_header)frame_header curData:(NSData *)curData;
+{
+  assert(frame_header.opcode != 0);
+
+  if (self.readyState != SR_OPEN) {
+    return;
+  }
+
+
+  BOOL isControlFrame = (frame_header.opcode == SROpCodePing || frame_header.opcode == SROpCodePong || frame_header.opcode == SROpCodeConnectionClose);
+
+  if (isControlFrame && !frame_header.fin) {
+    [self _closeWithProtocolError:@"Fragmented control frames not allowed"];
+    return;
+  }
+
+  if (isControlFrame && frame_header.payload_length >= 126) {
+    [self _closeWithProtocolError:@"Control frames cannot have payloads larger than 126 bytes"];
+    return;
+  }
+
+  if (!isControlFrame) {
+    _currentFrameOpcode = frame_header.opcode;
+    _currentFrameCount += 1;
+  }
+
+  if (frame_header.payload_length == 0) {
+    if (isControlFrame) {
+      [self _handleFrameWithData:curData opCode:frame_header.opcode];
+    } else {
+      if (frame_header.fin) {
+        [self _handleFrameWithData:_currentFrameData opCode:frame_header.opcode];
+      } else {
+        // TODO add assert that opcode is not a control;
+        [self _readFrameContinue];
+      }
+    }
+  } else {
+    assert(frame_header.payload_length <= SIZE_T_MAX);
+    [self _addConsumerWithDataLength:(size_t)frame_header.payload_length callback:^(SRWebSocket *self, NSData *newData) {
+      if (isControlFrame) {
+        [self _handleFrameWithData:newData opCode:frame_header.opcode];
+      } else {
+        if (frame_header.fin) {
+          [self _handleFrameWithData:self->_currentFrameData opCode:frame_header.opcode];
+        } else {
+          // TODO add assert that opcode is not a control;
+          [self _readFrameContinue];
+        }
+
+      }
+    } readToCurrentFrame:!isControlFrame unmaskBytes:frame_header.masked];
+  }
+}
+
+/* From RFC:
+
+      0                   1                   2                   3
+      0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     +-+-+-+-+-------+-+-------------+-------------------------------+
+     |F|R|R|R| opcode|M| Payload len |    Extended payload length    |
+     |I|S|S|S|  (4)  |A|     (7)     |             (16/64)           |
+     |N|V|V|V|       |S|             |   (if payload len==126/127)   |
+     | |1|2|3|       |K|             |                               |
+     +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
+     |     Extended payload length continued, if payload len == 127  |
+     + - - - - - - - - - - - - - - - +-------------------------------+
+     |                               |Masking-key, if MASK set to 1  |
+     +-------------------------------+-------------------------------+
+     | Masking-key (continued)       |          Payload Data         |
+     +-------------------------------- - - - - - - - - - - - - - - - +
+     :                     Payload Data continued ...                :
+     + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
+     |                     Payload Data continued ...                |
+     +---------------------------------------------------------------+
+ */
+
+// Bit masks over the first two header bytes, per the diagram above.
+static const uint8_t SRFinMask          = 0x80;
+static const uint8_t SROpCodeMask       = 0x0F;
+static const uint8_t SRRsvMask          = 0x70;
+static const uint8_t SRMaskMask         = 0x80;
+static const uint8_t SRPayloadLenMask   = 0x7F;
+
+
+// Reads and decodes the next frame header: first the fixed 2 bytes (FIN, RSV,
+// opcode, MASK, 7-bit length), then — if needed — the 16/64-bit extended
+// length and mask key, before handing the completed header to
+// -_handleFrameHeader:curData:. Enforces RSV == 0, correct use of
+// continuation opcodes, and that server frames are unmasked.
+- (void)_readFrameContinue;
+{
+  assert((_currentFrameCount == 0 && _currentFrameOpcode == 0) || (_currentFrameCount > 0 && _currentFrameOpcode > 0));
+
+  [self _addConsumerWithDataLength:2 callback:^(SRWebSocket *self, NSData *data) {
+    __block frame_header header = {0};
+
+    const uint8_t *headerBuffer = data.bytes;
+    assert(data.length >= 2);
+
+    if (headerBuffer[0] & SRRsvMask) {
+      [self _closeWithProtocolError:@"Server used RSV bits"];
+      return;
+    }
+
+    uint8_t receivedOpcode = (SROpCodeMask & headerBuffer[0]);
+
+    BOOL isControlFrame = (receivedOpcode == SROpCodePing || receivedOpcode == SROpCodePong || receivedOpcode == SROpCodeConnectionClose);
+
+    if (!isControlFrame && receivedOpcode != 0 && self->_currentFrameCount > 0) {
+      [self _closeWithProtocolError:@"all data frames after the initial data frame must have opcode 0"];
+      return;
+    }
+
+    if (receivedOpcode == 0 && self->_currentFrameCount == 0) {
+      [self _closeWithProtocolError:@"cannot continue a message"];
+      return;
+    }
+
+    // A continuation opcode (0) inherits the opcode of the fragment series.
+    header.opcode = receivedOpcode == 0 ? self->_currentFrameOpcode : receivedOpcode;
+
+    header.fin = !!(SRFinMask & headerBuffer[0]);
+
+
+    header.masked = !!(SRMaskMask & headerBuffer[1]);
+    header.payload_length = SRPayloadLenMask & headerBuffer[1];
+
+    headerBuffer = NULL;
+
+    if (header.masked) {
+      [self _closeWithProtocolError:@"Client must receive unmasked data"];
+    }
+
+    size_t extra_bytes_needed = header.masked ? sizeof(self->_currentReadMaskKey) : 0;
+
+    // 126 -> 16-bit extended length follows; 127 -> 64-bit extended length.
+    if (header.payload_length == 126) {
+      extra_bytes_needed += sizeof(uint16_t);
+    } else if (header.payload_length == 127) {
+      extra_bytes_needed += sizeof(uint64_t);
+    }
+
+    if (extra_bytes_needed == 0) {
+      [self _handleFrameHeader:header curData:self->_currentFrameData];
+    } else {
+      [self _addConsumerWithDataLength:extra_bytes_needed callback:^(SRWebSocket *self, NSData *data) {
+        size_t mapped_size = data.length;
+        const void *mapped_buffer = data.bytes;
+        size_t offset = 0;
+
+        if (header.payload_length == 126) {
+          assert(mapped_size >= sizeof(uint16_t));
+          uint16_t newLen = EndianU16_BtoN(*(uint16_t *)(mapped_buffer));
+          header.payload_length = newLen;
+          offset += sizeof(uint16_t);
+        } else if (header.payload_length == 127) {
+          assert(mapped_size >= sizeof(uint64_t));
+          header.payload_length = EndianU64_BtoN(*(uint64_t *)(mapped_buffer));
+          offset += sizeof(uint64_t);
+        } else {
+          assert(header.payload_length < 126 && header.payload_length >= 0);
+        }
+
+
+        if (header.masked) {
+          assert(mapped_size >= sizeof(self->_currentReadMaskOffset) + offset);
+          memcpy(self->_currentReadMaskKey,
+               ((uint8_t *)mapped_buffer) + offset,
+               sizeof(self->_currentReadMaskKey));
+        }
+
+        [self _handleFrameHeader:header curData:self->_currentFrameData];
+      } readToCurrentFrame:NO unmaskBytes:NO];
+    }
+  } readToCurrentFrame:NO unmaskBytes:NO];
+}
+
+// Resets per-message state (accumulated data, opcode, fragment counters, the
+// UTF-8 scan position) and starts reading a fresh frame on the work queue.
+- (void)_readFrameNew;
+{
+  dispatch_async(_workQueue, ^{
+    [self->_currentFrameData setLength:0];
+
+    self->_currentFrameOpcode = 0;
+    self->_currentFrameCount = 0;
+    self->_readOpCount = 0;
+    self->_currentStringScanPosition = 0;
+
+    [self _readFrameContinue];
+  });
+}
+
+// Flushes as much of the output buffer as the stream will take, compacting
+// the buffer once a large prefix has been written. When a close is pending
+// and the buffer has drained, closes both streams, unschedules from all run
+// loops, fires the didClose delegate callback (unless the socket failed),
+// and releases the self-retain. Work-queue only.
+- (void)_pumpWriting;
+{
+  [self assertOnWorkQueue];
+
+  NSUInteger dataLength = _outputBuffer.length;
+  if (dataLength - _outputBufferOffset > 0 && _outputStream.hasSpaceAvailable) {
+    NSInteger bytesWritten = [_outputStream write:_outputBuffer.bytes + _outputBufferOffset maxLength:dataLength - _outputBufferOffset];
+    if (bytesWritten == -1) {
+      [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2145 userInfo:[NSDictionary dictionaryWithObject:@"Error writing to stream" forKey:NSLocalizedDescriptionKey]]];
+      return;
+    }
+
+    _outputBufferOffset += bytesWritten;
+
+    // Reclaim memory once we've consumed > 4KB and more than half the buffer.
+    if (_outputBufferOffset > 4096 && _outputBufferOffset > (_outputBuffer.length >> 1)) {
+      _outputBuffer = [[NSMutableData alloc] initWithBytes:(char *)_outputBuffer.bytes + _outputBufferOffset length:_outputBuffer.length - _outputBufferOffset];
+      _outputBufferOffset = 0;
+    }
+  }
+
+  if (_closeWhenFinishedWriting &&
+      _outputBuffer.length - _outputBufferOffset == 0 &&
+      (_inputStream.streamStatus != NSStreamStatusNotOpen &&
+       _inputStream.streamStatus != NSStreamStatusClosed) &&
+      !_sentClose) {
+    _sentClose = YES;
+
+    [_outputStream close];
+    [_inputStream close];
+
+
+    for (NSArray *runLoop in [_scheduledRunloops copy]) {
+      [self unscheduleFromRunLoop:[runLoop objectAtIndex:0] forMode:[runLoop objectAtIndex:1]];
+    }
+
+    if (!_failed) {
+      [self _performDelegateBlock:^{
+        if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) {
+          [self.delegate webSocket:self
+              didCloseWithCode:self->_closeCode
+                        reason:self->_closeReason
+                      wasClean:YES];
+        }
+      }];
+    }
+
+    _selfRetain = nil;
+  }
+}
+
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback;
+{
+ [self assertOnWorkQueue];
+ [self _addConsumerWithScanner:consumer callback:callback dataLength:0];
+}
+
+- (void)_addConsumerWithDataLength:(size_t)dataLength callback:(data_callback)callback readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+{
+ [self assertOnWorkQueue];
+ assert(dataLength);
+
+ [_consumers addObject:[_consumerPool consumerWithScanner:nil handler:callback bytesNeeded:dataLength readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes]];
+ [self _pumpScanner];
+}
+
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback dataLength:(size_t)dataLength;
+{
+ [self assertOnWorkQueue];
+ [_consumers addObject:[_consumerPool consumerWithScanner:consumer handler:callback bytesNeeded:dataLength readToCurrentFrame:NO unmaskBytes:NO]];
+ [self _pumpScanner];
+}
+
+
+static const char CRLFCRLFBytes[] = {'\r', '\n', '\r', '\n'};
+
+- (void)_readUntilHeaderCompleteWithCallback:(data_callback)dataHandler;
+{
+ [self _readUntilBytes:CRLFCRLFBytes length:sizeof(CRLFCRLFBytes) callback:dataHandler];
+}
+
+- (void)_readUntilBytes:(const void *)bytes length:(size_t)length callback:(data_callback)dataHandler;
+{
+ // TODO optimize so this can continue from where we last searched
+ stream_scanner consumer = ^size_t(NSData *data) {
+ __block size_t found_size = 0;
+ __block size_t match_count = 0;
+
+ size_t size = data.length;
+ const unsigned char *buffer = data.bytes;
+ for (size_t i = 0; i < size; i++ ) {
+ if (((const unsigned char *)buffer)[i] == ((const unsigned char *)bytes)[match_count]) {
+ match_count += 1;
+ if (match_count == length) {
+ found_size = i + 1;
+ break;
+ }
+ } else {
+ match_count = 0;
+ }
+ }
+ return found_size;
+ };
+ [self _addConsumerWithScanner:consumer callback:dataHandler];
+}
+
+
+// Returns true if did work
+- (BOOL)_innerPumpScanner {
+
+ BOOL didWork = NO;
+
+ if (self.readyState >= SR_CLOSING) {
+ return didWork;
+ }
+
+ if (!_consumers.count) {
+ return didWork;
+ }
+
+ size_t curSize = _readBuffer.length - _readBufferOffset;
+ if (!curSize) {
+ return didWork;
+ }
+
+ SRIOConsumer *consumer = [_consumers objectAtIndex:0];
+
+ size_t bytesNeeded = consumer.bytesNeeded;
+
+ size_t foundSize = 0;
+ if (consumer.consumer) {
+ NSData *tempView = [NSData dataWithBytesNoCopy:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset freeWhenDone:NO];
+ foundSize = consumer.consumer(tempView);
+ } else {
+ assert(consumer.bytesNeeded);
+ if (curSize >= bytesNeeded) {
+ foundSize = bytesNeeded;
+ } else if (consumer.readToCurrentFrame) {
+ foundSize = curSize;
+ }
+ }
+
+ NSData *slice = nil;
+ if (consumer.readToCurrentFrame || foundSize) {
+ NSRange sliceRange = NSMakeRange(_readBufferOffset, foundSize);
+ slice = [_readBuffer subdataWithRange:sliceRange];
+
+ _readBufferOffset += foundSize;
+
+ if (_readBufferOffset > 4096 && _readBufferOffset > (_readBuffer.length >> 1)) {
+ _readBuffer = [[NSMutableData alloc] initWithBytes:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset]; _readBufferOffset = 0;
+ }
+
+ if (consumer.unmaskBytes) {
+ NSMutableData *mutableSlice = [slice mutableCopy];
+
+ NSUInteger len = mutableSlice.length;
+ uint8_t *bytes = mutableSlice.mutableBytes;
+
+ for (NSUInteger i = 0; i < len; i++) {
+ bytes[i] = bytes[i] ^ _currentReadMaskKey[_currentReadMaskOffset % sizeof(_currentReadMaskKey)];
+ _currentReadMaskOffset += 1;
+ }
+
+ slice = mutableSlice;
+ }
+
+ if (consumer.readToCurrentFrame) {
+ [_currentFrameData appendData:slice];
+
+ _readOpCount += 1;
+
+ if (_currentFrameOpcode == SROpCodeTextFrame) {
+ // Validate UTF8 stuff.
+ size_t currentDataSize = _currentFrameData.length;
+ if (_currentFrameOpcode == SROpCodeTextFrame && currentDataSize > 0) {
+ // TODO: Optimize the crap out of this. Don't really have to copy all the data each time
+
+ size_t scanSize = currentDataSize - _currentStringScanPosition;
+
+ NSData *scan_data = [_currentFrameData subdataWithRange:NSMakeRange(_currentStringScanPosition, scanSize)];
+ int32_t valid_utf8_size = validate_dispatch_data_partial_string(scan_data);
+
+ if (valid_utf8_size == -1) {
+ [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"];
+ dispatch_async(_workQueue, ^{
+ [self _disconnect];
+ });
+ return didWork;
+ } else {
+ _currentStringScanPosition += valid_utf8_size;
+ }
+ }
+
+ }
+
+ consumer.bytesNeeded -= foundSize;
+
+ if (consumer.bytesNeeded == 0) {
+ [_consumers removeObjectAtIndex:0];
+ consumer.handler(self, nil);
+ [_consumerPool returnConsumer:consumer];
+ didWork = YES;
+ }
+ } else if (foundSize) {
+ [_consumers removeObjectAtIndex:0];
+ consumer.handler(self, slice);
+ [_consumerPool returnConsumer:consumer];
+ didWork = YES;
+ }
+ }
+ return didWork;
+}
+
+-(void)_pumpScanner;
+{
+ [self assertOnWorkQueue];
+
+ if (!_isPumping) {
+ _isPumping = YES;
+ } else {
+ return;
+ }
+
+ while ([self _innerPumpScanner]) {
+
+ }
+
+ _isPumping = NO;
+}
+
+//#define NOMASK
+
+static const size_t SRFrameHeaderOverhead = 32;
+
+- (void)_sendFrameWithOpcode:(SROpCode)opcode data:(id)data;
+{
+ [self assertOnWorkQueue];
+
+ if (nil == data) {
+ return;
+ }
+
+ NSAssert([data isKindOfClass:[NSData class]] || [data isKindOfClass:[NSString class]], @"NSString or NSData");
+
+ size_t payloadLength = [data isKindOfClass:[NSString class]] ? [(NSString *)data lengthOfBytesUsingEncoding:NSUTF8StringEncoding] : [data length];
+
+ NSMutableData *frame = [[NSMutableData alloc] initWithLength:payloadLength + SRFrameHeaderOverhead];
+ if (!frame) {
+ [self closeWithCode:SRStatusCodeMessageTooBig reason:@"Message too big"];
+ return;
+ }
+ uint8_t *frame_buffer = (uint8_t *)[frame mutableBytes];
+
+ // set fin
+ frame_buffer[0] = SRFinMask | opcode;
+
+ BOOL useMask = YES;
+#ifdef NOMASK
+ useMask = NO;
+#endif
+
+ if (useMask) {
+ // set the mask and header
+ frame_buffer[1] |= SRMaskMask;
+ }
+
+ size_t frame_buffer_size = 2;
+
+ const uint8_t *unmasked_payload = NULL;
+ if ([data isKindOfClass:[NSData class]]) {
+ unmasked_payload = (uint8_t *)[data bytes];
+ } else if ([data isKindOfClass:[NSString class]]) {
+ unmasked_payload = (const uint8_t *)[data UTF8String];
+ } else {
+ return;
+ }
+
+ if (payloadLength < 126) {
+ frame_buffer[1] |= payloadLength;
+ } else if (payloadLength <= UINT16_MAX) {
+ frame_buffer[1] |= 126;
+ *((uint16_t *)(frame_buffer + frame_buffer_size)) = EndianU16_BtoN((uint16_t)payloadLength);
+ frame_buffer_size += sizeof(uint16_t);
+ } else {
+ frame_buffer[1] |= 127;
+ *((uint64_t *)(frame_buffer + frame_buffer_size)) = EndianU64_BtoN((uint64_t)payloadLength);
+ frame_buffer_size += sizeof(uint64_t);
+ }
+
+ if (!useMask) {
+ for (size_t i = 0; i < payloadLength; i++) {
+ frame_buffer[frame_buffer_size] = unmasked_payload[i];
+ frame_buffer_size += 1;
+ }
+ } else {
+ uint8_t *mask_key = frame_buffer + frame_buffer_size;
+ BOOL success = !SecRandomCopyBytes(kSecRandomDefault, sizeof(uint32_t), (uint8_t *)mask_key);
+ assert(success);
+ frame_buffer_size += sizeof(uint32_t);
+
+ // TODO: could probably optimize this with SIMD
+ for (size_t i = 0; i < payloadLength; i++) {
+ frame_buffer[frame_buffer_size] = unmasked_payload[i] ^ mask_key[i % sizeof(uint32_t)];
+ frame_buffer_size += 1;
+ }
+ }
+
+ assert(frame_buffer_size <= [frame length]);
+ frame.length = frame_buffer_size;
+
+ [self _writeData:frame];
+}
+
+- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode;
+{
+ if (_secure && !_pinnedCertFound && (eventCode == NSStreamEventHasBytesAvailable || eventCode == NSStreamEventHasSpaceAvailable)) {
+
+ NSArray *sslCerts = [_urlRequest SR_SSLPinnedCertificates];
+ if (sslCerts) {
+ SecTrustRef secTrust = (__bridge SecTrustRef)[aStream propertyForKey:(__bridge id)kCFStreamPropertySSLPeerTrust];
+ if (secTrust) {
+ NSInteger numCerts = SecTrustGetCertificateCount(secTrust);
+ for (NSInteger i = 0; i < numCerts && !_pinnedCertFound; i++) {
+ SecCertificateRef cert = SecTrustGetCertificateAtIndex(secTrust, i);
+ NSData *certData = CFBridgingRelease(SecCertificateCopyData(cert));
+
+ for (id ref in sslCerts) {
+ SecCertificateRef trustedCert = (__bridge SecCertificateRef)ref;
+ NSData *trustedCertData = CFBridgingRelease(SecCertificateCopyData(trustedCert));
+
+ if ([trustedCertData isEqualToData:certData]) {
+ _pinnedCertFound = YES;
+ break;
+ }
+ }
+ }
+ }
+
+ if (!_pinnedCertFound) {
+ dispatch_async(_workQueue, ^{
+ [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:23556 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Invalid server cert"] forKey:NSLocalizedDescriptionKey]]];
+ });
+ return;
+ }
+ }
+ }
+
+ dispatch_async(_workQueue, ^{
+ switch (eventCode) {
+ case NSStreamEventOpenCompleted: {
+ SRFastLog(@"NSStreamEventOpenCompleted %@", aStream);
+ if (self.readyState >= SR_CLOSING) {
+ return;
+ }
+ assert(self->_readBuffer);
+
+ if (self.readyState == SR_CONNECTING && aStream == self->_inputStream) {
+ [self didConnect];
+ }
+ [self _pumpWriting];
+ [self _pumpScanner];
+ break;
+ }
+
+ case NSStreamEventErrorOccurred: {
+ SRFastLog(@"NSStreamEventErrorOccurred %@ %@", aStream, [[aStream streamError] copy]);
+ /// TODO specify error better!
+ [self _failWithError:aStream.streamError];
+ self->_readBufferOffset = 0;
+ [self->_readBuffer setLength:0];
+ break;
+
+ }
+
+ case NSStreamEventEndEncountered: {
+ [self _pumpScanner];
+ SRFastLog(@"NSStreamEventEndEncountered %@", aStream);
+ if (aStream.streamError) {
+ [self _failWithError:aStream.streamError];
+ } else {
+ if (self.readyState != SR_CLOSED) {
+ self.readyState = SR_CLOSED;
+ self->_selfRetain = nil;
+ }
+
+ if (!self->_sentClose && !self->_failed) {
+ self->_sentClose = YES;
+ // If we get closed in this state it's probably not clean because we should be
+ // sending this when we send messages
+ [self
+ _performDelegateBlock:^{
+ if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) {
+ [self.delegate webSocket:self
+ didCloseWithCode:SRStatusCodeGoingAway
+ reason:@"Stream end encountered"
+ wasClean:NO];
+ }
+ }];
+ }
+ }
+
+ break;
+ }
+
+ case NSStreamEventHasBytesAvailable: {
+ SRFastLog(@"NSStreamEventHasBytesAvailable %@", aStream);
+ enum EnumType : int { bufferSize = 2048 };
+ uint8_t buffer[bufferSize];
+
+ while (self->_inputStream.hasBytesAvailable) {
+ NSInteger bytes_read = [self->_inputStream read:buffer maxLength:bufferSize];
+
+ if (bytes_read > 0) {
+ [self->_readBuffer appendBytes:buffer length:bytes_read];
+ } else if (bytes_read < 0) {
+ [self _failWithError:self->_inputStream.streamError];
+ }
+
+ if (bytes_read != bufferSize) {
+ break;
+ }
+ };
+ [self _pumpScanner];
+ break;
+ }
+
+ case NSStreamEventHasSpaceAvailable: {
+ SRFastLog(@"NSStreamEventHasSpaceAvailable %@", aStream);
+ [self _pumpWriting];
+ break;
+ }
+
+ default:
+ SRFastLog(@"(default) %@", aStream);
+ break;
+ }
+ });
+}
+
+@end
+
+
+@implementation SRIOConsumer
+
+@synthesize bytesNeeded = _bytesNeeded;
+@synthesize consumer = _scanner;
+@synthesize handler = _handler;
+@synthesize readToCurrentFrame = _readToCurrentFrame;
+@synthesize unmaskBytes = _unmaskBytes;
+
+- (void)setupWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+{
+ _scanner = [scanner copy];
+ _handler = [handler copy];
+ _bytesNeeded = bytesNeeded;
+ _readToCurrentFrame = readToCurrentFrame;
+ _unmaskBytes = unmaskBytes;
+ assert(_scanner || _bytesNeeded);
+}
+
+
+@end
+
+
+@implementation SRIOConsumerPool {
+ NSUInteger _poolSize;
+ NSMutableArray *_bufferedConsumers;
+}
+
+- (id)initWithBufferCapacity:(NSUInteger)poolSize;
+{
+ self = [super init];
+ if (self) {
+ _poolSize = poolSize;
+ _bufferedConsumers = [[NSMutableArray alloc] initWithCapacity:poolSize];
+ }
+ return self;
+}
+
+- (id)init
+{
+ return [self initWithBufferCapacity:8];
+}
+
+- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+{
+ SRIOConsumer *consumer = nil;
+ if (_bufferedConsumers.count) {
+ consumer = [_bufferedConsumers lastObject];
+ [_bufferedConsumers removeLastObject];
+ } else {
+ consumer = [[SRIOConsumer alloc] init];
+ }
+
+ [consumer setupWithScanner:scanner handler:handler bytesNeeded:bytesNeeded readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes];
+
+ return consumer;
+}
+
+- (void)returnConsumer:(SRIOConsumer *)consumer;
+{
+ if (_bufferedConsumers.count < _poolSize) {
+ [_bufferedConsumers addObject:consumer];
+ }
+}
+
+@end
+
+
+@implementation NSURLRequest (CertificateAdditions)
+
+- (NSArray *)SR_SSLPinnedCertificates;
+{
+ return [NSURLProtocol propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self];
+}
+
+@end
+
+@implementation NSMutableURLRequest (CertificateAdditions)
+
+- (NSArray *)SR_SSLPinnedCertificates;
+{
+ return [NSURLProtocol propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self];
+}
+
+- (void)setSR_SSLPinnedCertificates:(NSArray *)SR_SSLPinnedCertificates;
+{
+ [NSURLProtocol setProperty:SR_SSLPinnedCertificates forKey:@"SR_SSLPinnedCertificates" inRequest:self];
+}
+
+@end
+
+@implementation NSURL (SRWebSocket)
+
+- (NSString *)SR_origin;
+{
+ NSString *scheme = [self.scheme lowercaseString];
+
+ if ([scheme isEqualToString:@"wss"]) {
+ scheme = @"https";
+ } else if ([scheme isEqualToString:@"ws"]) {
+ scheme = @"http";
+ }
+
+ if (self.port) {
+ return [NSString stringWithFormat:@"%@://%@:%@/", scheme, self.host, self.port];
+ } else {
+ return [NSString stringWithFormat:@"%@://%@/", scheme, self.host];
+ }
+}
+
+@end
+
+//#define SR_ENABLE_LOG
+
+static inline void SRFastLog(NSString *format, ...) {
+#ifdef SR_ENABLE_LOG
+ __block va_list arg_list;
+ va_start (arg_list, format);
+
+ NSString *formattedString = [[NSString alloc] initWithFormat:format arguments:arg_list];
+
+ va_end(arg_list);
+
+ NSLog(@"[SR] %@", formattedString);
+#endif
+}
+
+
+#ifdef HAS_ICU
+
+static inline int32_t validate_dispatch_data_partial_string(NSData *data) {
+ if ([data length] > INT32_MAX) {
+ // INT32_MAX is the limit so long as this Framework is using 32 bit ints everywhere.
+ return -1;
+ }
+
+ int32_t size = (int32_t)[data length];
+
+ const void * contents = [data bytes];
+ const uint8_t *str = (const uint8_t *)contents;
+
+ UChar32 codepoint = 1;
+ int32_t offset = 0;
+ int32_t lastOffset = 0;
+ while(offset < size && codepoint > 0) {
+ lastOffset = offset;
+ U8_NEXT(str, offset, size, codepoint);
+ }
+
+ if (codepoint == -1) {
+ // Check to see if the last byte is valid or whether it was just continuing
+ if (!U8_IS_LEAD(str[lastOffset]) || U8_COUNT_TRAIL_BYTES(str[lastOffset]) + lastOffset < (int32_t)size) {
+
+ size = -1;
+ } else {
+ uint8_t leadByte = str[lastOffset];
+ U8_MASK_LEAD_BYTE(leadByte, U8_COUNT_TRAIL_BYTES(leadByte));
+
+ for (int i = lastOffset + 1; i < offset; i++) {
+ if (U8_IS_SINGLE(str[i]) || U8_IS_LEAD(str[i]) || !U8_IS_TRAIL(str[i])) {
+ size = -1;
+ }
+ }
+
+ if (size != -1) {
+ size = lastOffset;
+ }
+ }
+ }
+
+ if (size != -1 && ![[NSString alloc] initWithBytesNoCopy:(char *)[data bytes] length:size encoding:NSUTF8StringEncoding freeWhenDone:NO]) {
+ size = -1;
+ }
+
+ return size;
+}
+
+#else
+
+// This is a hack, and probably not optimal
+static inline int32_t validate_dispatch_data_partial_string(NSData *data) {
+ static const int maxCodepointSize = 3;
+
+ for (int i = 0; i < maxCodepointSize; i++) {
+ NSString *str = [[NSString alloc] initWithBytesNoCopy:(char *)data.bytes length:data.length - i encoding:NSUTF8StringEncoding freeWhenDone:NO];
+ if (str) {
+ return data.length - i;
+ }
+ }
+
+ return -1;
+}
+
+#endif
+
+static _SRRunLoopThread *networkThread = nil;
+static NSRunLoop *networkRunLoop = nil;
+
+@implementation NSRunLoop (SRWebSocket)
+
++ (NSRunLoop *)SR_networkRunLoop {
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^{
+ networkThread = [[_SRRunLoopThread alloc] init];
+ networkThread.name = @"com.squareup.SocketRocket.NetworkThread";
+ [networkThread start];
+ networkRunLoop = networkThread.runLoop;
+ });
+
+ return networkRunLoop;
+}
+
+@end
+
+
+@implementation _SRRunLoopThread {
+ dispatch_group_t _waitGroup;
+}
+
+@synthesize runLoop = _runLoop;
+
+- (void)dealloc
+{
+ sr_dispatch_release(_waitGroup);
+}
+
+- (id)init
+{
+ self = [super init];
+ if (self) {
+ _waitGroup = dispatch_group_create();
+ dispatch_group_enter(_waitGroup);
+ }
+ return self;
+}
+
+- (void)main;
+{
+ @autoreleasepool {
+ _runLoop = [NSRunLoop currentRunLoop];
+ dispatch_group_leave(_waitGroup);
+
+ NSTimer *timer = [[NSTimer alloc] initWithFireDate:[NSDate distantFuture] interval:0.0 target:nil selector:nil userInfo:nil repeats:NO];
+ [_runLoop addTimer:timer forMode:NSDefaultRunLoopMode];
+
+ while ([_runLoop runMode:NSDefaultRunLoopMode beforeDate:[NSDate distantFuture]]) {
+
+ }
+ assert(NO);
+ }
+}
+
+- (NSRunLoop *)runLoop;
+{
+ dispatch_group_wait(_waitGroup, DISPATCH_TIME_FOREVER);
+ return _runLoop;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/Icon-120.png b/third_party/libwebrtc/examples/objc/Icon-120.png
new file mode 100644
index 0000000000..938fef477b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/Icon-120.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/Icon-180.png b/third_party/libwebrtc/examples/objc/Icon-180.png
new file mode 100644
index 0000000000..a5b7609680
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/Icon-180.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/Icon.png b/third_party/libwebrtc/examples/objc/Icon.png
new file mode 100644
index 0000000000..55773ca9d9
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/Icon.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/README b/third_party/libwebrtc/examples/objc/README
new file mode 100644
index 0000000000..bfe18b37c5
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/README
@@ -0,0 +1,3 @@
+This directory contains sample iOS and mac clients for http://apprtc.appspot.com
+
+See ../../app/webrtc/objc/README for information on how to use it.
diff --git a/third_party/libwebrtc/examples/objcnativeapi/Info.plist b/third_party/libwebrtc/examples/objcnativeapi/Info.plist
new file mode 100644
index 0000000000..cbc9e5f9f3
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/Info.plist
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.ObjCNativeAPIDemo</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>ObjCNativeAPIDemo</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h
new file mode 100644
index 0000000000..02372dbfd2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface NADAppDelegate : UIResponder <UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow* window;
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m
new file mode 100644
index 0000000000..254dd3be76
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NADAppDelegate.h"
+
+#import "NADViewController.h"
+
+@interface NADAppDelegate ()
+@end
+
+@implementation NADAppDelegate
+
+@synthesize window = _window;
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
+ [_window makeKeyAndVisible];
+
+ NADViewController *viewController = [[NADViewController alloc] init];
+ _window.rootViewController = viewController;
+
+ return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {
+ // Sent when the application is about to move from active to inactive state. This can occur for
+ // certain types of temporary interruptions (such as an incoming phone call or SMS message) or
+ // when the user quits the application and it begins the transition to the background state. Use
+ // this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
+ // callbacks. Games should use this method to pause the game.
+}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {
+ // Use this method to release shared resources, save user data, invalidate timers, and store
+ // enough application state information to restore your application to its current state in case
+ // it is terminated later. If your application supports background execution, this method is
+ // called instead of applicationWillTerminate: when the user quits.
+}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {
+ // Called as part of the transition from the background to the active state; here you can undo
+ // many of the changes made on entering the background.
+}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {
+ // Restart any tasks that were paused (or not yet started) while the application was inactive. If
+ // the application was previously in the background, optionally refresh the user interface.
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+ // Called when the application is about to terminate. Save data if appropriate. See also
+ // applicationDidEnterBackground:.
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h
new file mode 100644
index 0000000000..c43bebb52d
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h
@@ -0,0 +1,15 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface NADViewController : UIViewController
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm
new file mode 100644
index 0000000000..fd244799f8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NADViewController.h"
+
+#import "sdk/objc/base/RTCVideoRenderer.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/components/renderer/metal/RTCMTLVideoView.h"
+#import "sdk/objc/helpers/RTCCameraPreviewView.h"
+
+#include <memory>
+
+#include "examples/objcnativeapi/objc/objc_call_client.h"
+
+@interface NADViewController ()
+
+@property(nonatomic) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@property(nonatomic) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
+@property(nonatomic) UIButton *callButton;
+@property(nonatomic) UIButton *hangUpButton;
+
+@end
+
+@implementation NADViewController {
+ std::unique_ptr<webrtc_examples::ObjCCallClient> _call_client;
+
+ UIView *_view;
+}
+
+@synthesize capturer = _capturer;
+@synthesize localVideoView = _localVideoView;
+@synthesize remoteVideoView = _remoteVideoView;
+@synthesize callButton = _callButton;
+@synthesize hangUpButton = _hangUpButton;
+
+#pragma mark - View controller lifecycle
+
+- (void)loadView {
+ _view = [[UIView alloc] initWithFrame:CGRectZero];
+
+ _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+ _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO;
+ [_view addSubview:_remoteVideoView];
+
+ _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
+ _localVideoView.translatesAutoresizingMaskIntoConstraints = NO;
+ [_view addSubview:_localVideoView];
+
+ _callButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _callButton.translatesAutoresizingMaskIntoConstraints = NO;
+ [_callButton setTitle:@"Call" forState:UIControlStateNormal];
+ [_callButton addTarget:self action:@selector(call:) forControlEvents:UIControlEventTouchUpInside];
+ [_view addSubview:_callButton];
+
+ _hangUpButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _hangUpButton.translatesAutoresizingMaskIntoConstraints = NO;
+ [_hangUpButton setTitle:@"Hang up" forState:UIControlStateNormal];
+ [_hangUpButton addTarget:self
+ action:@selector(hangUp:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [_view addSubview:_hangUpButton];
+
+ UILayoutGuide *margin = _view.layoutMarginsGuide;
+ [_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
+ [_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active = YES;
+ [_remoteVideoView.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
+ [_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor].active = YES;
+
+ [_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
+ YES;
+ [_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:8.0].active = YES;
+ [_localVideoView.widthAnchor constraintEqualToConstant:60].active = YES;
+ [_localVideoView.heightAnchor constraintEqualToConstant:60].active = YES;
+
+ [_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
+ YES;
+ [_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active = YES;
+ [_callButton.widthAnchor constraintEqualToConstant:100].active = YES;
+ [_callButton.heightAnchor constraintEqualToConstant:40].active = YES;
+
+ [_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor constant:8.0].active =
+ YES;
+ [_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active =
+ YES;
+ [_hangUpButton.widthAnchor constraintEqualToConstant:100].active = YES;
+ [_hangUpButton.heightAnchor constraintEqualToConstant:40].active = YES;
+
+ self.view = _view;
+}
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+
+ self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] init];
+ self.localVideoView.captureSession = self.capturer.captureSession;
+
+ _call_client.reset(new webrtc_examples::ObjCCallClient());
+
+ // Start capturer.
+ AVCaptureDevice *selectedDevice = nil;
+ NSArray<AVCaptureDevice *> *captureDevices =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
+ for (AVCaptureDevice *device in captureDevices) {
+ if (device.position == AVCaptureDevicePositionFront) {
+ selectedDevice = device;
+ break;
+ }
+ }
+
+ AVCaptureDeviceFormat *selectedFormat = nil;
+ int targetWidth = 640;
+ int targetHeight = 480;
+ int currentDiff = INT_MAX;
+ NSArray<AVCaptureDeviceFormat *> *formats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:selectedDevice];
+ for (AVCaptureDeviceFormat *format in formats) {
+ CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+ if (diff < currentDiff) {
+ selectedFormat = format;
+ currentDiff = diff;
+ } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+ selectedFormat = format;
+ }
+ }
+
+ [self.capturer startCaptureWithDevice:selectedDevice format:selectedFormat fps:30];
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+ // Dispose of any resources that can be recreated.
+}
+
+#pragma mark - Actions
+
+- (IBAction)call:(id)sender {
+ _call_client->Call(self.capturer, self.remoteVideoView);
+}
+
+- (IBAction)hangUp:(id)sender {
+ _call_client->Hangup();
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/main.m b/third_party/libwebrtc/examples/objcnativeapi/objc/main.m
new file mode 100644
index 0000000000..2c3b5fbbfb
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/main.m
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+#import "NADAppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
+ }
+}
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h
new file mode 100644
index 0000000000..cb8501d9ce
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
+#define EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
+
+#include <memory>
+#include <string>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+
+@class RTC_OBJC_TYPE(RTCVideoCapturer);
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+
+namespace webrtc_examples {
+
+class ObjCCallClient {
+ public:
+ ObjCCallClient();
+
+ void Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer);
+ void Hangup();
+
+ private:
+ class PCObserver : public webrtc::PeerConnectionObserver {
+ public:
+ explicit PCObserver(ObjCCallClient* client);
+
+ void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override;
+ void OnDataChannel(rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+ void OnRenegotiationNeeded() override;
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+
+ private:
+ ObjCCallClient* const client_;
+ };
+
+ void CreatePeerConnectionFactory() RTC_RUN_ON(thread_checker_);
+ void CreatePeerConnection() RTC_RUN_ON(thread_checker_);
+ void Connect() RTC_RUN_ON(thread_checker_);
+
+ webrtc::SequenceChecker thread_checker_;
+
+ bool call_started_ RTC_GUARDED_BY(thread_checker_);
+
+ const std::unique_ptr<PCObserver> pc_observer_;
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> network_thread_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> worker_thread_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> signaling_thread_ RTC_GUARDED_BY(thread_checker_);
+
+ std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> remote_sink_
+ RTC_GUARDED_BY(thread_checker_);
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source_
+ RTC_GUARDED_BY(thread_checker_);
+
+ webrtc::Mutex pc_mutex_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_ RTC_GUARDED_BY(pc_mutex_);
+};
+
+} // namespace webrtc_examples
+
+#endif // EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm
new file mode 100644
index 0000000000..90bcfcc35b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/objcnativeapi/objc/objc_call_client.h"
+
+#include <memory>
+#include <utility>
+
+#import "sdk/objc/base/RTCVideoRenderer.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
+#import "sdk/objc/helpers/RTCCameraPreviewView.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "sdk/objc/native/api/video_capturer.h"
+#include "sdk/objc/native/api/video_decoder_factory.h"
+#include "sdk/objc/native/api/video_encoder_factory.h"
+#include "sdk/objc/native/api/video_renderer.h"
+
+namespace webrtc_examples {
+
+namespace {
+
+class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
+ public:
+ explicit CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);
+
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError error) override;
+
+ private:
+ const rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_;
+};
+
+class SetRemoteSessionDescriptionObserver : public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+ void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override;
+};
+
+class SetLocalSessionDescriptionObserver : public webrtc::SetLocalDescriptionObserverInterface {
+ public:
+ void OnSetLocalDescriptionComplete(webrtc::RTCError error) override;
+};
+
+} // namespace
+
+ObjCCallClient::ObjCCallClient()
+ : call_started_(false), pc_observer_(std::make_unique<PCObserver>(this)) {
+ thread_checker_.Detach();
+ CreatePeerConnectionFactory();
+}
+
+void ObjCCallClient::Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ webrtc::MutexLock lock(&pc_mutex_);
+ if (call_started_) {
+ RTC_LOG(LS_WARNING) << "Call already started.";
+ return;
+ }
+ call_started_ = true;
+
+ remote_sink_ = webrtc::ObjCToNativeVideoRenderer(remote_renderer);
+
+ video_source_ =
+ webrtc::ObjCToNativeVideoCapturer(capturer, signaling_thread_.get(), worker_thread_.get());
+
+ CreatePeerConnection();
+ Connect();
+}
+
+void ObjCCallClient::Hangup() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ call_started_ = false;
+
+ {
+ webrtc::MutexLock lock(&pc_mutex_);
+ if (pc_ != nullptr) {
+ pc_->Close();
+ pc_ = nullptr;
+ }
+ }
+
+ remote_sink_ = nullptr;
+ video_source_ = nullptr;
+}
+
+void ObjCCallClient::CreatePeerConnectionFactory() {
+ network_thread_ = rtc::Thread::CreateWithSocketServer();
+ network_thread_->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread_->Start()) << "Failed to start thread";
+
+ worker_thread_ = rtc::Thread::Create();
+ worker_thread_->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread_->Start()) << "Failed to start thread";
+
+ signaling_thread_ = rtc::Thread::Create();
+ signaling_thread_->SetName("signaling_thread", nullptr);
+ RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread";
+
+ webrtc::PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = network_thread_.get();
+ dependencies.worker_thread = worker_thread_.get();
+ dependencies.signaling_thread = signaling_thread_.get();
+ dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = dependencies.task_queue_factory.get();
+ media_deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
+ media_deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
+ media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(
+ [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]);
+ media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(
+ [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]);
+ media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
+ dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ RTC_LOG(LS_INFO) << "Media engine created: " << dependencies.media_engine.get();
+ dependencies.call_factory = webrtc::CreateCallFactory();
+ dependencies.event_log_factory =
+ std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
+ pcf_ = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
+ RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_.get();
+}
+
+void ObjCCallClient::CreatePeerConnection() {
+ webrtc::MutexLock lock(&pc_mutex_);
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ // Encryption has to be disabled for loopback to work.
+ webrtc::PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ pcf_->SetOptions(options);
+ webrtc::PeerConnectionDependencies pc_dependencies(pc_observer_.get());
+ pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)).MoveValue();
+ RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_.get();
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
+ pcf_->CreateVideoTrack(video_source_, "video");
+ pc_->AddTransceiver(local_video_track);
+ RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track.get();
+
+ for (const rtc::scoped_refptr<webrtc::RtpTransceiverInterface>& tranceiver :
+ pc_->GetTransceivers()) {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track = tranceiver->receiver()->track();
+ if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+ static_cast<webrtc::VideoTrackInterface*>(track.get())
+ ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants());
+ RTC_LOG(LS_INFO) << "Remote video sink set up: " << track.get();
+ break;
+ }
+ }
+}
+
+void ObjCCallClient::Connect() {
+ webrtc::MutexLock lock(&pc_mutex_);
+ pc_->CreateOffer(rtc::make_ref_counted<CreateOfferObserver>(pc_).get(),
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+}
+
+ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client) : client_(client) {}
+
+void ObjCCallClient::PCObserver::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ RTC_LOG(LS_INFO) << "OnSignalingChange: " << new_state;
+}
+
+void ObjCCallClient::PCObserver::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+ RTC_LOG(LS_INFO) << "OnDataChannel";
+}
+
+void ObjCCallClient::PCObserver::OnRenegotiationNeeded() {
+ RTC_LOG(LS_INFO) << "OnRenegotiationNeeded";
+}
+
+void ObjCCallClient::PCObserver::OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) {
+ RTC_LOG(LS_INFO) << "OnIceConnectionChange: " << new_state;
+}
+
+void ObjCCallClient::PCObserver::OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) {
+ RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state;
+}
+
+void ObjCCallClient::PCObserver::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+ RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
+ webrtc::MutexLock lock(&client_->pc_mutex_);
+ RTC_DCHECK(client_->pc_ != nullptr);
+ client_->pc_->AddIceCandidate(candidate);
+}
+
+CreateOfferObserver::CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
+ : pc_(pc) {}
+
+void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
+ std::string sdp;
+ desc->ToString(&sdp);
+ RTC_LOG(LS_INFO) << "Created offer: " << sdp;
+
+ // Ownership of desc was transferred to us, now we transfer it forward.
+ pc_->SetLocalDescription(absl::WrapUnique(desc),
+ rtc::make_ref_counted<SetLocalSessionDescriptionObserver>());
+
+ // Generate a fake answer.
+ std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
+ webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
+ pc_->SetRemoteDescription(std::move(answer),
+ rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
+}
+
+void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Failed to create offer: " << error.message();
+}
+
+void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Set remote description: " << error.message();
+}
+
+void SetLocalSessionDescriptionObserver::OnSetLocalDescriptionComplete(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Set local description: " << error.message();
+}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/peerconnection/OWNERS b/third_party/libwebrtc/examples/peerconnection/OWNERS
new file mode 100644
index 0000000000..0fba125734
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/OWNERS
@@ -0,0 +1 @@
+tommi@webrtc.org
diff --git a/third_party/libwebrtc/examples/peerconnection/client/conductor.cc b/third_party/libwebrtc/examples/peerconnection/client/conductor.cc
new file mode 100644
index 0000000000..f94a981a75
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/conductor.cc
@@ -0,0 +1,614 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/client/conductor.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/audio_options.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/rtp_sender_interface.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "examples/peerconnection/client/defaults.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_factory.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/strings/json.h"
+#include "test/vcm_capturer.h"
+
+namespace {
+// Names used for an IceCandidate JSON object.
+const char kCandidateSdpMidName[] = "sdpMid";
+const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex";
+const char kCandidateSdpName[] = "candidate";
+
+// Names used for a SessionDescription JSON object.
+const char kSessionDescriptionTypeName[] = "type";
+const char kSessionDescriptionSdpName[] = "sdp";
+
+class DummySetSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ static rtc::scoped_refptr<DummySetSessionDescriptionObserver> Create() {
+ return rtc::make_ref_counted<DummySetSessionDescriptionObserver>();
+ }
+ virtual void OnSuccess() { RTC_LOG(LS_INFO) << __FUNCTION__; }
+ virtual void OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << ToString(error.type()) << ": "
+ << error.message();
+ }
+};
+
+class CapturerTrackSource : public webrtc::VideoTrackSource {
+ public:
+ static rtc::scoped_refptr<CapturerTrackSource> Create() {
+ const size_t kWidth = 640;
+ const size_t kHeight = 480;
+ const size_t kFps = 30;
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer;
+ std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
+ webrtc::VideoCaptureFactory::CreateDeviceInfo());
+ if (!info) {
+ return nullptr;
+ }
+ int num_devices = info->NumberOfDevices();
+ for (int i = 0; i < num_devices; ++i) {
+ capturer = absl::WrapUnique(
+ webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i));
+ if (capturer) {
+ return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
+ }
+ }
+
+ return nullptr;
+ }
+
+ protected:
+ explicit CapturerTrackSource(
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer)
+ : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {}
+
+ private:
+ rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
+ return capturer_.get();
+ }
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer_;
+};
+
+} // namespace
+
+Conductor::Conductor(PeerConnectionClient* client, MainWindow* main_wnd)
+ : peer_id_(-1), loopback_(false), client_(client), main_wnd_(main_wnd) {
+ client_->RegisterObserver(this);
+ main_wnd->RegisterObserver(this);
+}
+
+Conductor::~Conductor() {
+ RTC_DCHECK(!peer_connection_);
+}
+
+bool Conductor::connection_active() const {
+ return peer_connection_ != nullptr;
+}
+
+void Conductor::Close() {
+ client_->SignOut();
+ DeletePeerConnection();
+}
+
+bool Conductor::InitializePeerConnection() {
+ RTC_DCHECK(!peer_connection_factory_);
+ RTC_DCHECK(!peer_connection_);
+
+ if (!signaling_thread_.get()) {
+ signaling_thread_ = rtc::Thread::CreateWithSocketServer();
+ signaling_thread_->Start();
+ }
+ peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+ nullptr /* network_thread */, nullptr /* worker_thread */,
+ signaling_thread_.get(), nullptr /* default_adm */,
+ webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<webrtc::VideoEncoderFactoryTemplate<
+ webrtc::LibvpxVp8EncoderTemplateAdapter,
+ webrtc::LibvpxVp9EncoderTemplateAdapter,
+ webrtc::OpenH264EncoderTemplateAdapter,
+ webrtc::LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<webrtc::VideoDecoderFactoryTemplate<
+ webrtc::LibvpxVp8DecoderTemplateAdapter,
+ webrtc::LibvpxVp9DecoderTemplateAdapter,
+ webrtc::OpenH264DecoderTemplateAdapter,
+ webrtc::Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+
+ if (!peer_connection_factory_) {
+ main_wnd_->MessageBox("Error", "Failed to initialize PeerConnectionFactory",
+ true);
+ DeletePeerConnection();
+ return false;
+ }
+
+ if (!CreatePeerConnection()) {
+ main_wnd_->MessageBox("Error", "CreatePeerConnection failed", true);
+ DeletePeerConnection();
+ }
+
+ AddTracks();
+
+ return peer_connection_ != nullptr;
+}
+
+bool Conductor::ReinitializePeerConnectionForLoopback() {
+ loopback_ = true;
+ std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> senders =
+ peer_connection_->GetSenders();
+ peer_connection_ = nullptr;
+ // Loopback is only possible if encryption is disabled.
+ webrtc::PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ peer_connection_factory_->SetOptions(options);
+ if (CreatePeerConnection()) {
+ for (const auto& sender : senders) {
+ peer_connection_->AddTrack(sender->track(), sender->stream_ids());
+ }
+ peer_connection_->CreateOffer(
+ this, webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+ }
+ options.disable_encryption = false;
+ peer_connection_factory_->SetOptions(options);
+ return peer_connection_ != nullptr;
+}
+
+bool Conductor::CreatePeerConnection() {
+ RTC_DCHECK(peer_connection_factory_);
+ RTC_DCHECK(!peer_connection_);
+
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer server;
+ server.uri = GetPeerConnectionString();
+ config.servers.push_back(server);
+
+ webrtc::PeerConnectionDependencies pc_dependencies(this);
+ auto error_or_peer_connection =
+ peer_connection_factory_->CreatePeerConnectionOrError(
+ config, std::move(pc_dependencies));
+ if (error_or_peer_connection.ok()) {
+ peer_connection_ = std::move(error_or_peer_connection.value());
+ }
+ return peer_connection_ != nullptr;
+}
+
+void Conductor::DeletePeerConnection() {
+ main_wnd_->StopLocalRenderer();
+ main_wnd_->StopRemoteRenderer();
+ peer_connection_ = nullptr;
+ peer_connection_factory_ = nullptr;
+ peer_id_ = -1;
+ loopback_ = false;
+}
+
+void Conductor::EnsureStreamingUI() {
+ RTC_DCHECK(peer_connection_);
+ if (main_wnd_->IsWindow()) {
+ if (main_wnd_->current_ui() != MainWindow::STREAMING)
+ main_wnd_->SwitchToStreamingUI();
+ }
+}
+
+//
+// PeerConnectionObserver implementation.
+//
+
+void Conductor::OnAddTrack(
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
+ streams) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << receiver->id();
+ main_wnd_->QueueUIThreadCallback(NEW_TRACK_ADDED,
+ receiver->track().release());
+}
+
+void Conductor::OnRemoveTrack(
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << receiver->id();
+ main_wnd_->QueueUIThreadCallback(TRACK_REMOVED, receiver->track().release());
+}
+
+void Conductor::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index();
+ // For loopback test. To save some connecting delay.
+ if (loopback_) {
+ if (!peer_connection_->AddIceCandidate(candidate)) {
+ RTC_LOG(LS_WARNING) << "Failed to apply the received candidate";
+ }
+ return;
+ }
+
+ Json::Value jmessage;
+ jmessage[kCandidateSdpMidName] = candidate->sdp_mid();
+ jmessage[kCandidateSdpMlineIndexName] = candidate->sdp_mline_index();
+ std::string sdp;
+ if (!candidate->ToString(&sdp)) {
+ RTC_LOG(LS_ERROR) << "Failed to serialize candidate";
+ return;
+ }
+ jmessage[kCandidateSdpName] = sdp;
+
+ Json::StreamWriterBuilder factory;
+ SendMessage(Json::writeString(factory, jmessage));
+}
+
+//
+// PeerConnectionClientObserver implementation.
+//
+
+void Conductor::OnSignedIn() {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ main_wnd_->SwitchToPeerList(client_->peers());
+}
+
+void Conductor::OnDisconnected() {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+
+ DeletePeerConnection();
+
+ if (main_wnd_->IsWindow())
+ main_wnd_->SwitchToConnectUI();
+}
+
+void Conductor::OnPeerConnected(int id, const std::string& name) {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ // Refresh the list if we're showing it.
+ if (main_wnd_->current_ui() == MainWindow::LIST_PEERS)
+ main_wnd_->SwitchToPeerList(client_->peers());
+}
+
+void Conductor::OnPeerDisconnected(int id) {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ if (id == peer_id_) {
+ RTC_LOG(LS_INFO) << "Our peer disconnected";
+ main_wnd_->QueueUIThreadCallback(PEER_CONNECTION_CLOSED, NULL);
+ } else {
+ // Refresh the list if we're showing it.
+ if (main_wnd_->current_ui() == MainWindow::LIST_PEERS)
+ main_wnd_->SwitchToPeerList(client_->peers());
+ }
+}
+
+void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) {
+ RTC_DCHECK(peer_id_ == peer_id || peer_id_ == -1);
+ RTC_DCHECK(!message.empty());
+
+ if (!peer_connection_.get()) {
+ RTC_DCHECK(peer_id_ == -1);
+ peer_id_ = peer_id;
+
+ if (!InitializePeerConnection()) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize our PeerConnection instance";
+ client_->SignOut();
+ return;
+ }
+ } else if (peer_id != peer_id_) {
+ RTC_DCHECK(peer_id_ != -1);
+ RTC_LOG(LS_WARNING)
+ << "Received a message from unknown peer while already in a "
+ "conversation with a different peer.";
+ return;
+ }
+
+ Json::CharReaderBuilder factory;
+ std::unique_ptr<Json::CharReader> reader =
+ absl::WrapUnique(factory.newCharReader());
+ Json::Value jmessage;
+ if (!reader->parse(message.data(), message.data() + message.length(),
+ &jmessage, nullptr)) {
+ RTC_LOG(LS_WARNING) << "Received unknown message. " << message;
+ return;
+ }
+ std::string type_str;
+ std::string json_object;
+
+ rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName,
+ &type_str);
+ if (!type_str.empty()) {
+ if (type_str == "offer-loopback") {
+ // This is a loopback call.
+ // Recreate the peerconnection with DTLS disabled.
+ if (!ReinitializePeerConnectionForLoopback()) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize our PeerConnection instance";
+ DeletePeerConnection();
+ client_->SignOut();
+ }
+ return;
+ }
+ absl::optional<webrtc::SdpType> type_maybe =
+ webrtc::SdpTypeFromString(type_str);
+ if (!type_maybe) {
+ RTC_LOG(LS_ERROR) << "Unknown SDP type: " << type_str;
+ return;
+ }
+ webrtc::SdpType type = *type_maybe;
+ std::string sdp;
+ if (!rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionSdpName,
+ &sdp)) {
+ RTC_LOG(LS_WARNING)
+ << "Can't parse received session description message.";
+ return;
+ }
+ webrtc::SdpParseError error;
+ std::unique_ptr<webrtc::SessionDescriptionInterface> session_description =
+ webrtc::CreateSessionDescription(type, sdp, &error);
+ if (!session_description) {
+ RTC_LOG(LS_WARNING)
+ << "Can't parse received session description message. "
+ "SdpParseError was: "
+ << error.description;
+ return;
+ }
+ RTC_LOG(LS_INFO) << " Received session description :" << message;
+ peer_connection_->SetRemoteDescription(
+ DummySetSessionDescriptionObserver::Create().get(),
+ session_description.release());
+ if (type == webrtc::SdpType::kOffer) {
+ peer_connection_->CreateAnswer(
+ this, webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+ }
+ } else {
+ std::string sdp_mid;
+ int sdp_mlineindex = 0;
+ std::string sdp;
+ if (!rtc::GetStringFromJsonObject(jmessage, kCandidateSdpMidName,
+ &sdp_mid) ||
+ !rtc::GetIntFromJsonObject(jmessage, kCandidateSdpMlineIndexName,
+ &sdp_mlineindex) ||
+ !rtc::GetStringFromJsonObject(jmessage, kCandidateSdpName, &sdp)) {
+ RTC_LOG(LS_WARNING) << "Can't parse received message.";
+ return;
+ }
+ webrtc::SdpParseError error;
+ std::unique_ptr<webrtc::IceCandidateInterface> candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdp, &error));
+ if (!candidate.get()) {
+ RTC_LOG(LS_WARNING) << "Can't parse received candidate message. "
+ "SdpParseError was: "
+ << error.description;
+ return;
+ }
+ if (!peer_connection_->AddIceCandidate(candidate.get())) {
+ RTC_LOG(LS_WARNING) << "Failed to apply the received candidate";
+ return;
+ }
+ RTC_LOG(LS_INFO) << " Received candidate :" << message;
+ }
+}
+
+void Conductor::OnMessageSent(int err) {
+ // Process the next pending message if any.
+ main_wnd_->QueueUIThreadCallback(SEND_MESSAGE_TO_PEER, NULL);
+}
+
+void Conductor::OnServerConnectionFailure() {
+ main_wnd_->MessageBox("Error", ("Failed to connect to " + server_).c_str(),
+ true);
+}
+
+//
+// MainWndCallback implementation.
+//
+
+void Conductor::StartLogin(const std::string& server, int port) {
+ if (client_->is_connected())
+ return;
+ server_ = server;
+ client_->Connect(server, port, GetPeerName());
+}
+
+void Conductor::DisconnectFromServer() {
+ if (client_->is_connected())
+ client_->SignOut();
+}
+
+void Conductor::ConnectToPeer(int peer_id) {
+ RTC_DCHECK(peer_id_ == -1);
+ RTC_DCHECK(peer_id != -1);
+
+ if (peer_connection_.get()) {
+ main_wnd_->MessageBox(
+ "Error", "We only support connecting to one peer at a time", true);
+ return;
+ }
+
+ if (InitializePeerConnection()) {
+ peer_id_ = peer_id;
+ peer_connection_->CreateOffer(
+ this, webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+ } else {
+ main_wnd_->MessageBox("Error", "Failed to initialize PeerConnection", true);
+ }
+}
+
+void Conductor::AddTracks() {
+ if (!peer_connection_->GetSenders().empty()) {
+ return; // Already added tracks.
+ }
+
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ peer_connection_factory_->CreateAudioTrack(
+ kAudioLabel,
+ peer_connection_factory_->CreateAudioSource(cricket::AudioOptions())
+ .get()));
+ auto result_or_error = peer_connection_->AddTrack(audio_track, {kStreamId});
+ if (!result_or_error.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add audio track to PeerConnection: "
+ << result_or_error.error().message();
+ }
+
+ rtc::scoped_refptr<CapturerTrackSource> video_device =
+ CapturerTrackSource::Create();
+ if (video_device) {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
+ peer_connection_factory_->CreateVideoTrack(video_device, kVideoLabel));
+ main_wnd_->StartLocalRenderer(video_track_.get());
+
+ result_or_error = peer_connection_->AddTrack(video_track_, {kStreamId});
+ if (!result_or_error.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add video track to PeerConnection: "
+ << result_or_error.error().message();
+ }
+ } else {
+ RTC_LOG(LS_ERROR) << "OpenVideoCaptureDevice failed";
+ }
+
+ main_wnd_->SwitchToStreamingUI();
+}
+
+void Conductor::DisconnectFromCurrentPeer() {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ if (peer_connection_.get()) {
+ client_->SendHangUp(peer_id_);
+ DeletePeerConnection();
+ }
+
+ if (main_wnd_->IsWindow())
+ main_wnd_->SwitchToPeerList(client_->peers());
+}
+
+void Conductor::UIThreadCallback(int msg_id, void* data) {
+ switch (msg_id) {
+ case PEER_CONNECTION_CLOSED:
+ RTC_LOG(LS_INFO) << "PEER_CONNECTION_CLOSED";
+ DeletePeerConnection();
+
+ if (main_wnd_->IsWindow()) {
+ if (client_->is_connected()) {
+ main_wnd_->SwitchToPeerList(client_->peers());
+ } else {
+ main_wnd_->SwitchToConnectUI();
+ }
+ } else {
+ DisconnectFromServer();
+ }
+ break;
+
+ case SEND_MESSAGE_TO_PEER: {
+ RTC_LOG(LS_INFO) << "SEND_MESSAGE_TO_PEER";
+ std::string* msg = reinterpret_cast<std::string*>(data);
+ if (msg) {
+ // For convenience, we always run the message through the queue.
+ // This way we can be sure that messages are sent to the server
+ // in the same order they were signaled without much hassle.
+ pending_messages_.push_back(msg);
+ }
+
+ if (!pending_messages_.empty() && !client_->IsSendingMessage()) {
+ msg = pending_messages_.front();
+ pending_messages_.pop_front();
+
+ if (!client_->SendToPeer(peer_id_, *msg) && peer_id_ != -1) {
+ RTC_LOG(LS_ERROR) << "SendToPeer failed";
+ DisconnectFromServer();
+ }
+ delete msg;
+ }
+
+ if (!peer_connection_.get())
+ peer_id_ = -1;
+
+ break;
+ }
+
+ case NEW_TRACK_ADDED: {
+ auto* track = reinterpret_cast<webrtc::MediaStreamTrackInterface*>(data);
+ if (track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+ auto* video_track = static_cast<webrtc::VideoTrackInterface*>(track);
+ main_wnd_->StartRemoteRenderer(video_track);
+ }
+ track->Release();
+ break;
+ }
+
+ case TRACK_REMOVED: {
+ // Remote peer stopped sending a track.
+ auto* track = reinterpret_cast<webrtc::MediaStreamTrackInterface*>(data);
+ track->Release();
+ break;
+ }
+
+ default:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ }
+}
+
+void Conductor::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
+ peer_connection_->SetLocalDescription(
+ DummySetSessionDescriptionObserver::Create().get(), desc);
+
+ std::string sdp;
+ desc->ToString(&sdp);
+
+ // For loopback test. To save some connecting delay.
+ if (loopback_) {
+ // Replace message type from "offer" to "answer"
+ std::unique_ptr<webrtc::SessionDescriptionInterface> session_description =
+ webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp);
+ peer_connection_->SetRemoteDescription(
+ DummySetSessionDescriptionObserver::Create().get(),
+ session_description.release());
+ return;
+ }
+
+ Json::Value jmessage;
+ jmessage[kSessionDescriptionTypeName] =
+ webrtc::SdpTypeToString(desc->GetType());
+ jmessage[kSessionDescriptionSdpName] = sdp;
+
+ Json::StreamWriterBuilder factory;
+ SendMessage(Json::writeString(factory, jmessage));
+}
+
+void Conductor::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_ERROR) << ToString(error.type()) << ": " << error.message();
+}
+
+void Conductor::SendMessage(const std::string& json_object) {
+ std::string* msg = new std::string(json_object);
+ main_wnd_->QueueUIThreadCallback(SEND_MESSAGE_TO_PEER, msg);
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/conductor.h b/third_party/libwebrtc/examples/peerconnection/client/conductor.h
new file mode 100644
index 0000000000..80617d3cf4
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/conductor.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#define EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "examples/peerconnection/client/main_wnd.h"
+#include "examples/peerconnection/client/peer_connection_client.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+class VideoCaptureModule;
+} // namespace webrtc
+
+namespace cricket {
+class VideoRenderer;
+} // namespace cricket
+
+// Conductor glues together the signaling client (PeerConnectionClient), the
+// platform UI (MainWindow) and the WebRTC PeerConnection: it observes events
+// from each component and translates them into actions on the others.
+// It is reference-counted via CreateSessionDescriptionObserver, hence the
+// protected destructor.
+class Conductor : public webrtc::PeerConnectionObserver,
+                  public webrtc::CreateSessionDescriptionObserver,
+                  public PeerConnectionClientObserver,
+                  public MainWndCallback {
+ public:
+  // Message ids dispatched through MainWindow::QueueUIThreadCallback /
+  // UIThreadCallback to marshal work onto the UI thread.
+  enum CallbackID {
+    MEDIA_CHANNELS_INITIALIZED = 1,
+    PEER_CONNECTION_CLOSED,
+    SEND_MESSAGE_TO_PEER,
+    NEW_TRACK_ADDED,
+    TRACK_REMOVED,
+  };
+
+  Conductor(PeerConnectionClient* client, MainWindow* main_wnd);
+
+  // True while a PeerConnection with a remote peer exists.
+  bool connection_active() const;
+
+  void Close() override;
+
+ protected:
+  ~Conductor();
+  bool InitializePeerConnection();
+  bool ReinitializePeerConnectionForLoopback();
+  bool CreatePeerConnection();
+  void DeletePeerConnection();
+  void EnsureStreamingUI();
+  void AddTracks();
+
+  //
+  // PeerConnectionObserver implementation.
+  //
+
+  void OnSignalingChange(
+      webrtc::PeerConnectionInterface::SignalingState new_state) override {}
+  void OnAddTrack(
+      rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
+      const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
+          streams) override;
+  void OnRemoveTrack(
+      rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) override;
+  void OnDataChannel(
+      rtc::scoped_refptr<webrtc::DataChannelInterface> channel) override {}
+  void OnRenegotiationNeeded() override {}
+  void OnIceConnectionChange(
+      webrtc::PeerConnectionInterface::IceConnectionState new_state) override {}
+  void OnIceGatheringChange(
+      webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
+  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+  void OnIceConnectionReceivingChange(bool receiving) override {}
+
+  //
+  // PeerConnectionClientObserver implementation.
+  //
+
+  void OnSignedIn() override;
+
+  void OnDisconnected() override;
+
+  void OnPeerConnected(int id, const std::string& name) override;
+
+  void OnPeerDisconnected(int id) override;
+
+  void OnMessageFromPeer(int peer_id, const std::string& message) override;
+
+  void OnMessageSent(int err) override;
+
+  void OnServerConnectionFailure() override;
+
+  //
+  // MainWndCallback implementation.
+  //
+
+  void StartLogin(const std::string& server, int port) override;
+
+  void DisconnectFromServer() override;
+
+  void ConnectToPeer(int peer_id) override;
+
+  void DisconnectFromCurrentPeer() override;
+
+  void UIThreadCallback(int msg_id, void* data) override;
+
+  // CreateSessionDescriptionObserver implementation.
+  void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+  void OnFailure(webrtc::RTCError error) override;
+
+ protected:
+  // Send a message to the remote peer.
+  void SendMessage(const std::string& json_object);
+
+  // Id of the remote peer we are (or were last) connected to; semantics of
+  // the sentinel value are set in the .cc — TODO(review): confirm.
+  int peer_id_;
+  // True when running the offer->answer loopback shortcut (see OnSuccess).
+  bool loopback_;
+  std::unique_ptr<rtc::Thread> signaling_thread_;
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+      peer_connection_factory_;
+  PeerConnectionClient* client_;  // Not owned.
+  MainWindow* main_wnd_;          // Not owned.
+  // Raw heap strings queued for sending; presumably drained and deleted by
+  // the send path in the .cc — TODO(review): confirm ownership there.
+  std::deque<std::string*> pending_messages_;
+  std::string server_;
+};
+
+#endif // EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/client/defaults.cc b/third_party/libwebrtc/examples/peerconnection/client/defaults.cc
new file mode 100644
index 0000000000..ee3a9e1f0a
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/defaults.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/client/defaults.h"
+
+#include <stdlib.h>
+
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <unistd.h>
+#endif
+
+#include "rtc_base/arraysize.h"
+
+const char kAudioLabel[] = "audio_label";
+const char kVideoLabel[] = "video_label";
+const char kStreamId[] = "stream_id";
+const uint16_t kDefaultServerPort = 8888;
+
+// Returns the value of `env_var_name`, or `default_value` when the variable
+// is unset or set to the empty string.
+std::string GetEnvVarOrDefault(const char* env_var_name,
+                               const char* default_value) {
+  std::string value;
+  const char* env_var = getenv(env_var_name);
+  if (env_var)
+    value = env_var;
+
+  // Treat an empty value the same as an unset variable.
+  if (value.empty())
+    value = default_value;
+
+  return value;
+}
+
+// STUN/TURN server URI, overridable via the WEBRTC_CONNECT env variable;
+// defaults to Google's public STUN server.
+std::string GetPeerConnectionString() {
+  return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302");
+}
+
+// Signaling server host, overridable via the WEBRTC_SERVER env variable.
+std::string GetDefaultServerName() {
+  return GetEnvVarOrDefault("WEBRTC_SERVER", "localhost");
+}
+
+// Builds a "user@host" display name from the USERNAME env variable and the
+// local hostname, with "user"/"host" fallbacks.
+// NOTE(review): POSIX allows gethostname() to leave the buffer without a
+// terminating NUL on truncation; 256 bytes makes that unlikely in practice,
+// but it is not guaranteed — confirm if hostnames can exceed the buffer.
+std::string GetPeerName() {
+  char computer_name[256];
+  std::string ret(GetEnvVarOrDefault("USERNAME", "user"));
+  ret += '@';
+  if (gethostname(computer_name, arraysize(computer_name)) == 0) {
+    ret += computer_name;
+  } else {
+    ret += "host";
+  }
+  return ret;
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/defaults.h b/third_party/libwebrtc/examples/peerconnection/client/defaults.h
new file mode 100644
index 0000000000..30936fd9d4
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/defaults.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
+#define EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
+
+#include <stdint.h>
+
+#include <string>
+
+extern const char kAudioLabel[];
+extern const char kVideoLabel[];
+extern const char kStreamId[];
+extern const uint16_t kDefaultServerPort;
+
+std::string GetEnvVarOrDefault(const char* env_var_name,
+ const char* default_value);
+std::string GetPeerConnectionString();
+std::string GetDefaultServerName();
+std::string GetPeerName();
+
+#endif // EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/client/flag_defs.h b/third_party/libwebrtc/examples/peerconnection/client/flag_defs.h
new file mode 100644
index 0000000000..986daf64ce
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/flag_defs.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_CLIENT_FLAG_DEFS_H_
+#define EXAMPLES_PEERCONNECTION_CLIENT_FLAG_DEFS_H_
+
+#include <string>
+
+#include "absl/flags/flag.h"
+
+extern const uint16_t kDefaultServerPort; // From defaults.[h|cc]
+
+// Define flags for the peerconnect_client testing tool, in a separate
+// header file so that they can be shared across the different main.cc's
+// for each platform.
+
+// Flags are read with absl::GetFlag(FLAGS_<name>) from each platform's
+// main() (see e.g. linux/main.cc, which also validates the port range).
+ABSL_FLAG(bool,
+          autoconnect,
+          false,
+          "Connect to the server without user "
+          "intervention.");
+ABSL_FLAG(std::string, server, "localhost", "The server to connect to.");
+ABSL_FLAG(int,
+          port,
+          kDefaultServerPort,
+          "The port on which the server is listening.");
+ABSL_FLAG(
+    bool,
+    autocall,
+    false,
+    "Call the first available other client on "
+    "the server without user intervention. Note: this flag should only be set "
+    "to true on one of the two clients.");
+
+ABSL_FLAG(
+    std::string,
+    force_fieldtrials,
+    "",
+    "Field trials control experimental features. This flag specifies the field "
+    "trials in effect. E.g. running with "
+    "--force_fieldtrials=WebRTC-FooFeature/Enabled/ "
+    "will assign the group Enabled to field trial WebRTC-FooFeature. Multiple "
+    "trials are separated by \"/\"");
+
+#endif // EXAMPLES_PEERCONNECTION_CLIENT_FLAG_DEFS_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/client/linux/main.cc b/third_party/libwebrtc/examples/peerconnection/client/linux/main.cc
new file mode 100644
index 0000000000..ad3d671073
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/linux/main.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <glib.h>
+#include <gtk/gtk.h>
+#include <stdio.h>
+
+#include "absl/flags/parse.h"
+#include "api/scoped_refptr.h"
+#include "examples/peerconnection/client/conductor.h"
+#include "examples/peerconnection/client/flag_defs.h"
+#include "examples/peerconnection/client/linux/main_wnd.h"
+#include "examples/peerconnection/client/peer_connection_client.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/ssl_adapter.h"
+#include "rtc_base/thread.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+
+// Socket server that interleaves the GTK main loop with libwebrtc's socket
+// pump, so one thread can service both. Also quits the message queue once
+// the window is gone and no connection remains.
+class CustomSocketServer : public rtc::PhysicalSocketServer {
+ public:
+  // BUGFIX: message_queue_ was left uninitialized; Wait() dereferences it,
+  // so a Wait() before SetMessageQueue() would read an indeterminate
+  // pointer. Initialize it to NULL like the other members.
+  explicit CustomSocketServer(GtkMainWnd* wnd)
+      : message_queue_(NULL), wnd_(wnd), conductor_(NULL), client_(NULL) {}
+  virtual ~CustomSocketServer() {}
+
+  void SetMessageQueue(rtc::Thread* queue) override { message_queue_ = queue; }
+
+  void set_client(PeerConnectionClient* client) { client_ = client; }
+  void set_conductor(Conductor* conductor) { conductor_ = conductor; }
+
+  // Override so that we can also pump the GTK message loop.
+  // This function never waits.
+  bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override {
+    // Pump GTK events.
+    // TODO(henrike): We really should move either the socket server or UI to a
+    // different thread. Alternatively we could look at merging the two loops
+    // by implementing a dispatcher for the socket server and/or use
+    // g_main_context_set_poll_func.
+    while (gtk_events_pending())
+      gtk_main_iteration();
+
+    // Shut down once the window is closed and nothing is connected anymore.
+    if (!wnd_->IsWindow() && !conductor_->connection_active() &&
+        client_ != NULL && !client_->is_connected()) {
+      message_queue_->Quit();
+    }
+    // Poll sockets without blocking so the GTK loop keeps getting serviced.
+    return rtc::PhysicalSocketServer::Wait(webrtc::TimeDelta::Zero(),
+                                           process_io);
+  }
+
+ protected:
+  rtc::Thread* message_queue_;      // Not owned; set via SetMessageQueue().
+  GtkMainWnd* wnd_;                 // Not owned.
+  Conductor* conductor_;            // Not owned.
+  PeerConnectionClient* client_;    // Not owned.
+};
+
+// Entry point for the Linux (GTK) peerconnection client: parses flags,
+// builds the window, and runs the combined GTK/socket event loop until the
+// window closes and the connection is torn down.
+int main(int argc, char* argv[]) {
+  gtk_init(&argc, &argv);
+// g_type_init API is deprecated (and does nothing) since glib 2.35.0, see:
+// https://mail.gnome.org/archives/commits-list/2012-November/msg07809.html
+#if !GLIB_CHECK_VERSION(2, 35, 0)
+  g_type_init();
+#endif
+// g_thread_init API is deprecated since glib 2.31.0, see release note:
+// http://mail.gnome.org/archives/gnome-announce-list/2011-October/msg00041.html
+#if !GLIB_CHECK_VERSION(2, 31, 0)
+  g_thread_init(NULL);
+#endif
+
+  absl::ParseCommandLine(argc, argv);
+
+  // InitFieldTrialsFromString stores the char*, so the char array must outlive
+  // the application.
+  const std::string forced_field_trials =
+      absl::GetFlag(FLAGS_force_fieldtrials);
+  webrtc::field_trial::InitFieldTrialsFromString(forced_field_trials.c_str());
+
+  // Abort if the user specifies a port that is outside the allowed
+  // range [1, 65535].
+  if ((absl::GetFlag(FLAGS_port) < 1) || (absl::GetFlag(FLAGS_port) > 65535)) {
+    printf("Error: %i is not a valid port.\n", absl::GetFlag(FLAGS_port));
+    return -1;
+  }
+
+  const std::string server = absl::GetFlag(FLAGS_server);
+  GtkMainWnd wnd(server.c_str(), absl::GetFlag(FLAGS_port),
+                 absl::GetFlag(FLAGS_autoconnect),
+                 absl::GetFlag(FLAGS_autocall));
+  wnd.Create();
+
+  // The custom server pumps GTK events from within the socket wait loop.
+  CustomSocketServer socket_server(&wnd);
+  rtc::AutoSocketServerThread thread(&socket_server);
+
+  rtc::InitializeSSL();
+  // Must be constructed after we set the socketserver.
+  PeerConnectionClient client;
+  // Conductor is ref-counted; this scoped_refptr keeps it alive for the run.
+  auto conductor = rtc::make_ref_counted<Conductor>(&client, &wnd);
+  socket_server.set_client(&client);
+  socket_server.set_conductor(conductor.get());
+
+  // Blocks until CustomSocketServer::Wait() calls Quit().
+  thread.Run();
+
+  // gtk_main();
+  wnd.Destroy();
+
+  // TODO(henrike): Run the Gtk main loop to tear down the connection.
+  /*
+  while (gtk_events_pending()) {
+    gtk_main_iteration();
+  }
+  */
+  rtc::CleanupSSL();
+  return 0;
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.cc b/third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.cc
new file mode 100644
index 0000000000..2be75d8f8d
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.cc
@@ -0,0 +1,545 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/client/linux/main_wnd.h"
+
+#include <cairo.h>
+#include <gdk/gdk.h>
+#include <gdk/gdkkeysyms.h>
+#include <glib-object.h>
+#include <glib.h>
+#include <gobject/gclosure.h>
+#include <gtk/gtk.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <cstdint>
+#include <map>
+#include <utility>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "api/video/video_source_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+
+namespace {
+
+//
+// Simple static functions that simply forward the callback to the
+// GtkMainWnd instance.
+//
+
+// GTK "delete-event" handler; returning FALSE lets GTK destroy the window.
+gboolean OnDestroyedCallback(GtkWidget* widget,
+                             GdkEvent* event,
+                             gpointer data) {
+  reinterpret_cast<GtkMainWnd*>(data)->OnDestroyed(widget, event);
+  return FALSE;
+}
+
+void OnClickedCallback(GtkWidget* widget, gpointer data) {
+  reinterpret_cast<GtkMainWnd*>(data)->OnClicked(widget);
+}
+
+// Idle handler used by the autoconnect flow to "press" the connect button.
+gboolean SimulateButtonClick(gpointer button) {
+  g_signal_emit_by_name(button, "clicked");
+  return false;
+}
+
+gboolean OnKeyPressCallback(GtkWidget* widget,
+                            GdkEventKey* key,
+                            gpointer data) {
+  reinterpret_cast<GtkMainWnd*>(data)->OnKeyPress(widget, key);
+  return false;
+}
+
+void OnRowActivatedCallback(GtkTreeView* tree_view,
+                            GtkTreePath* path,
+                            GtkTreeViewColumn* column,
+                            gpointer data) {
+  reinterpret_cast<GtkMainWnd*>(data)->OnRowActivated(tree_view, path, column);
+}
+
+// Idle handler used by the autocall flow: selects and activates the last
+// row of the peer list (the most recently added peer).
+gboolean SimulateLastRowActivated(gpointer data) {
+  GtkTreeView* tree_view = reinterpret_cast<GtkTreeView*>(data);
+  GtkTreeModel* model = gtk_tree_view_get_model(tree_view);
+
+  // "if iter is NULL, then the number of toplevel nodes is returned."
+  int rows = gtk_tree_model_iter_n_children(model, NULL);
+  GtkTreePath* lastpath = gtk_tree_path_new_from_indices(rows - 1, -1);
+
+  // Select the last item in the list
+  GtkTreeSelection* selection = gtk_tree_view_get_selection(tree_view);
+  gtk_tree_selection_select_path(selection, lastpath);
+
+  // Our TreeView only has one column, so it is column 0.
+  GtkTreeViewColumn* column = gtk_tree_view_get_column(tree_view, 0);
+
+  gtk_tree_view_row_activated(tree_view, lastpath, column);
+
+  gtk_tree_path_free(lastpath);
+  return false;
+}
+
+// Creates a tree view, that we use to display the list of peers.
+void InitializeList(GtkWidget* list) {
+  GtkCellRenderer* renderer = gtk_cell_renderer_text_new();
+  GtkTreeViewColumn* column = gtk_tree_view_column_new_with_attributes(
+      "List Items", renderer, "text", 0, NULL);
+  gtk_tree_view_append_column(GTK_TREE_VIEW(list), column);
+  // Two columns per row: display string and the numeric peer id.
+  GtkListStore* store = gtk_list_store_new(2, G_TYPE_STRING, G_TYPE_INT);
+  gtk_tree_view_set_model(GTK_TREE_VIEW(list), GTK_TREE_MODEL(store));
+  g_object_unref(store);
+}
+
+// Adds an entry to a tree view.
+void AddToList(GtkWidget* list, const gchar* str, int value) {
+  GtkListStore* store =
+      GTK_LIST_STORE(gtk_tree_view_get_model(GTK_TREE_VIEW(list)));
+
+  GtkTreeIter iter;
+  gtk_list_store_append(store, &iter);
+  gtk_list_store_set(store, &iter, 0, str, 1, value, -1);
+}
+
+// Payload carried through g_idle_add to run a MainWndCallback callback on
+// the GTK main loop. Owned by the idle handler (deleted there).
+struct UIThreadCallbackData {
+  explicit UIThreadCallbackData(MainWndCallback* cb, int id, void* d)
+      : callback(cb), msg_id(id), data(d) {}
+  MainWndCallback* callback;
+  int msg_id;
+  void* data;
+};
+
+gboolean HandleUIThreadCallback(gpointer data) {
+  UIThreadCallbackData* cb_data = reinterpret_cast<UIThreadCallbackData*>(data);
+  cb_data->callback->UIThreadCallback(cb_data->msg_id, cb_data->data);
+  delete cb_data;
+  return false;
+}
+
+gboolean Redraw(gpointer data) {
+  GtkMainWnd* wnd = reinterpret_cast<GtkMainWnd*>(data);
+  wnd->OnRedraw();
+  return false;
+}
+
+gboolean Draw(GtkWidget* widget, cairo_t* cr, gpointer data) {
+  GtkMainWnd* wnd = reinterpret_cast<GtkMainWnd*>(data);
+  wnd->Draw(widget, cr);
+  return false;
+}
+
+}  // namespace
+
+//
+// GtkMainWnd implementation.
+//
+
+// Stores the connection parameters; widget construction happens in Create().
+// The port is kept as a string (port_) because it backs a GtkEntry.
+GtkMainWnd::GtkMainWnd(const char* server,
+                       int port,
+                       bool autoconnect,
+                       bool autocall)
+    : window_(NULL),
+      draw_area_(NULL),
+      vbox_(NULL),
+      server_edit_(NULL),
+      port_edit_(NULL),
+      peer_list_(NULL),
+      callback_(NULL),
+      server_(server),
+      autoconnect_(autoconnect),
+      autocall_(autocall) {
+  // 10 chars is ample for any int the validated port range can produce.
+  char buffer[10];
+  snprintf(buffer, sizeof(buffer), "%i", port);
+  port_ = buffer;
+}
+
+// The window must have been torn down via Destroy() before destruction.
+GtkMainWnd::~GtkMainWnd() {
+  RTC_DCHECK(!IsWindow());
+}
+
+// Registers the (not owned) callback that receives all UI events.
+void GtkMainWnd::RegisterObserver(MainWndCallback* callback) {
+  callback_ = callback;
+}
+
+// True while the top-level GTK window exists and is still a valid window.
+bool GtkMainWnd::IsWindow() {
+  return window_ != NULL && GTK_IS_WINDOW(window_);
+}
+
+// Shows a modal message dialog and blocks until the user dismisses it.
+// The "%s" format keeps user-provided text from being parsed as a format
+// string.
+void GtkMainWnd::MessageBox(const char* caption,
+                            const char* text,
+                            bool is_error) {
+  GtkWidget* dialog = gtk_message_dialog_new(
+      GTK_WINDOW(window_), GTK_DIALOG_DESTROY_WITH_PARENT,
+      is_error ? GTK_MESSAGE_ERROR : GTK_MESSAGE_INFO, GTK_BUTTONS_CLOSE, "%s",
+      text);
+  gtk_window_set_title(GTK_WINDOW(dialog), caption);
+  gtk_dialog_run(GTK_DIALOG(dialog));
+  gtk_widget_destroy(dialog);
+}
+
+// Infers the current UI mode from which widget is alive: the connect form
+// (vbox_), the peer list, or — when neither exists — the streaming view.
+MainWindow::UI GtkMainWnd::current_ui() {
+  if (vbox_)
+    return CONNECT_TO_SERVER;
+
+  if (peer_list_)
+    return LIST_PEERS;
+
+  return STREAMING;
+}
+
+// Attaches a renderer to the local capture track (self-view).
+void GtkMainWnd::StartLocalRenderer(webrtc::VideoTrackInterface* local_video) {
+  local_renderer_.reset(new VideoRenderer(this, local_video));
+}
+
+// Destroys the local renderer, which unregisters it from the track.
+void GtkMainWnd::StopLocalRenderer() {
+  local_renderer_.reset();
+}
+
+// Attaches a renderer to the remote peer's video track.
+void GtkMainWnd::StartRemoteRenderer(
+    webrtc::VideoTrackInterface* remote_video) {
+  remote_renderer_.reset(new VideoRenderer(this, remote_video));
+}
+
+// Destroys the remote renderer, which unregisters it from the track.
+void GtkMainWnd::StopRemoteRenderer() {
+  remote_renderer_.reset();
+}
+
+// Marshals a callback onto the GTK main loop via g_idle_add. The
+// UIThreadCallbackData wrapper is deleted by HandleUIThreadCallback.
+void GtkMainWnd::QueueUIThreadCallback(int msg_id, void* data) {
+  g_idle_add(HandleUIThreadCallback,
+             new UIThreadCallbackData(callback_, msg_id, data));
+}
+
+// Creates the top-level window, wires destruction/keyboard handlers and
+// shows the connect UI. Returns false if the window could not be created.
+bool GtkMainWnd::Create() {
+  RTC_DCHECK(window_ == NULL);
+
+  window_ = gtk_window_new(GTK_WINDOW_TOPLEVEL);
+  if (window_) {
+    gtk_window_set_position(GTK_WINDOW(window_), GTK_WIN_POS_CENTER);
+    gtk_window_set_default_size(GTK_WINDOW(window_), 640, 480);
+    gtk_window_set_title(GTK_WINDOW(window_), "PeerConnection client");
+    g_signal_connect(G_OBJECT(window_), "delete-event",
+                     G_CALLBACK(&OnDestroyedCallback), this);
+    g_signal_connect(window_, "key-press-event", G_CALLBACK(OnKeyPressCallback),
+                     this);
+
+    SwitchToConnectUI();
+  }
+
+  return window_ != NULL;
+}
+
+// Tears down the top-level window. Returns false if there was no window.
+bool GtkMainWnd::Destroy() {
+  if (!IsWindow())
+    return false;
+
+  gtk_widget_destroy(window_);
+  window_ = NULL;
+
+  return true;
+}
+
+// Builds the "connect to server" form (server + port entries and a Connect
+// button), replacing the peer list if it was showing. With --autoconnect the
+// button is "clicked" from an idle callback once the loop is running.
+void GtkMainWnd::SwitchToConnectUI() {
+  RTC_LOG(LS_INFO) << __FUNCTION__;
+
+  RTC_DCHECK(IsWindow());
+  RTC_DCHECK(vbox_ == NULL);
+
+  gtk_container_set_border_width(GTK_CONTAINER(window_), 10);
+
+  if (peer_list_) {
+    gtk_widget_destroy(peer_list_);
+    peer_list_ = NULL;
+  }
+
+  vbox_ = gtk_box_new(GTK_ORIENTATION_VERTICAL, 5);
+  GtkWidget* valign = gtk_alignment_new(0, 1, 0, 0);
+  gtk_container_add(GTK_CONTAINER(vbox_), valign);
+  gtk_container_add(GTK_CONTAINER(window_), vbox_);
+
+  GtkWidget* hbox = gtk_box_new(GTK_ORIENTATION_HORIZONTAL, 5);
+
+  GtkWidget* label = gtk_label_new("Server");
+  gtk_container_add(GTK_CONTAINER(hbox), label);
+
+  // Pre-fill the entries with the values given on the command line.
+  server_edit_ = gtk_entry_new();
+  gtk_entry_set_text(GTK_ENTRY(server_edit_), server_.c_str());
+  gtk_widget_set_size_request(server_edit_, 400, 30);
+  gtk_container_add(GTK_CONTAINER(hbox), server_edit_);
+
+  port_edit_ = gtk_entry_new();
+  gtk_entry_set_text(GTK_ENTRY(port_edit_), port_.c_str());
+  gtk_widget_set_size_request(port_edit_, 70, 30);
+  gtk_container_add(GTK_CONTAINER(hbox), port_edit_);
+
+  GtkWidget* button = gtk_button_new_with_label("Connect");
+  gtk_widget_set_size_request(button, 70, 30);
+  g_signal_connect(button, "clicked", G_CALLBACK(OnClickedCallback), this);
+  gtk_container_add(GTK_CONTAINER(hbox), button);
+
+  GtkWidget* halign = gtk_alignment_new(1, 0, 0, 0);
+  gtk_container_add(GTK_CONTAINER(halign), hbox);
+  gtk_box_pack_start(GTK_BOX(vbox_), halign, FALSE, FALSE, 0);
+
+  gtk_widget_show_all(window_);
+
+  if (autoconnect_)
+    g_idle_add(SimulateButtonClick, button);
+}
+
+// Shows (or refreshes) the list of connected peers. Creates the tree view on
+// first call, otherwise just clears and repopulates its store. With
+// --autocall the last (most recent) peer row is activated automatically.
+void GtkMainWnd::SwitchToPeerList(const Peers& peers) {
+  RTC_LOG(LS_INFO) << __FUNCTION__;
+
+  if (!peer_list_) {
+    gtk_container_set_border_width(GTK_CONTAINER(window_), 0);
+    // Tear down whichever view was previously showing.
+    if (vbox_) {
+      gtk_widget_destroy(vbox_);
+      vbox_ = NULL;
+      server_edit_ = NULL;
+      port_edit_ = NULL;
+    } else if (draw_area_) {
+      gtk_widget_destroy(draw_area_);
+      draw_area_ = NULL;
+      draw_buffer_.reset();
+    }
+
+    peer_list_ = gtk_tree_view_new();
+    g_signal_connect(peer_list_, "row-activated",
+                     G_CALLBACK(OnRowActivatedCallback), this);
+    gtk_tree_view_set_headers_visible(GTK_TREE_VIEW(peer_list_), FALSE);
+    InitializeList(peer_list_);
+    gtk_container_add(GTK_CONTAINER(window_), peer_list_);
+    gtk_widget_show_all(window_);
+  } else {
+    GtkListStore* store =
+        GTK_LIST_STORE(gtk_tree_view_get_model(GTK_TREE_VIEW(peer_list_)));
+    gtk_list_store_clear(store);
+  }
+
+  // Header row carries id -1, which OnRowActivated ignores.
+  AddToList(peer_list_, "List of currently connected peers:", -1);
+  for (Peers::const_iterator i = peers.begin(); i != peers.end(); ++i)
+    AddToList(peer_list_, i->second.c_str(), i->first);
+
+  if (autocall_ && peers.begin() != peers.end())
+    g_idle_add(SimulateLastRowActivated, peer_list_);
+}
+
+// Replaces the peer list with a drawing area that displays the video
+// streams; Draw() renders into it on each "draw" signal.
+void GtkMainWnd::SwitchToStreamingUI() {
+  RTC_LOG(LS_INFO) << __FUNCTION__;
+
+  RTC_DCHECK(draw_area_ == NULL);
+
+  gtk_container_set_border_width(GTK_CONTAINER(window_), 0);
+  if (peer_list_) {
+    gtk_widget_destroy(peer_list_);
+    peer_list_ = NULL;
+  }
+
+  draw_area_ = gtk_drawing_area_new();
+  gtk_container_add(GTK_CONTAINER(window_), draw_area_);
+  g_signal_connect(G_OBJECT(draw_area_), "draw", G_CALLBACK(&::Draw), this);
+
+  gtk_widget_show_all(window_);
+}
+
+// Window "delete-event" handler: notify the observer and drop every widget
+// pointer — GTK destroys the child widgets along with the window.
+void GtkMainWnd::OnDestroyed(GtkWidget* widget, GdkEvent* event) {
+  callback_->Close();
+  window_ = NULL;
+  draw_area_ = NULL;
+  vbox_ = NULL;
+  server_edit_ = NULL;
+  port_edit_ = NULL;
+  peer_list_ = NULL;
+}
+
+// Connect-button handler: read the server/port entries and start the login.
+// Note: may be invoked with widget == NULL from OnKeyPress (Enter key).
+void GtkMainWnd::OnClicked(GtkWidget* widget) {
+  // Make the connect button insensitive, so that it cannot be clicked more than
+  // once. Now that the connection includes auto-retry, it should not be
+  // necessary to click it more than once.
+  gtk_widget_set_sensitive(widget, false);
+  server_ = gtk_entry_get_text(GTK_ENTRY(server_edit_));
+  port_ = gtk_entry_get_text(GTK_ENTRY(port_edit_));
+  int port = port_.length() ? atoi(port_.c_str()) : 0;
+  callback_->StartLogin(server_, port);
+}
+
+// Keyboard shortcuts: Escape hangs up (streaming view) or logs out (peer
+// list); Enter submits the connect form.
+void GtkMainWnd::OnKeyPress(GtkWidget* widget, GdkEventKey* key) {
+  if (key->type == GDK_KEY_PRESS) {
+    switch (key->keyval) {
+      case GDK_KEY_Escape:
+        if (draw_area_) {
+          callback_->DisconnectFromCurrentPeer();
+        } else if (peer_list_) {
+          callback_->DisconnectFromServer();
+        }
+        break;
+
+      case GDK_KEY_KP_Enter:
+      case GDK_KEY_Return:
+        if (vbox_) {
+          OnClicked(NULL);
+        } else if (peer_list_) {
+          // OnRowActivated will be called automatically when the user
+          // presses enter.
+        }
+        break;
+
+      default:
+        break;
+    }
+  }
+}
+
+// Peer-list double-click/Enter handler: extract the peer id stored in
+// column 1 and connect to it. The header row carries id -1 and is ignored.
+void GtkMainWnd::OnRowActivated(GtkTreeView* tree_view,
+                                GtkTreePath* path,
+                                GtkTreeViewColumn* column) {
+  RTC_DCHECK(peer_list_ != NULL);
+  GtkTreeIter iter;
+  GtkTreeModel* model;
+  GtkTreeSelection* selection =
+      gtk_tree_view_get_selection(GTK_TREE_VIEW(tree_view));
+  if (gtk_tree_selection_get_selected(selection, &model, &iter)) {
+    char* text;
+    int id = -1;
+    gtk_tree_model_get(model, &iter, 0, &text, 1, &id, -1);
+    if (id != -1)
+      callback_->ConnectToPeer(id);
+    // gtk_tree_model_get returned an allocated copy of the string column.
+    g_free(text);
+  }
+}
+
+// Composites the latest frames into draw_buffer_ and schedules a repaint.
+// The remote frame is upscaled 2x by pixel doubling (each source pixel is
+// written twice per row, then the row is duplicated); the local preview is
+// downscaled to half size (every other pixel/row) and blitted into the
+// bottom-right corner. Runs on the GTK thread via the Redraw idle handler.
+void GtkMainWnd::OnRedraw() {
+  gdk_threads_enter();
+
+  VideoRenderer* remote_renderer = remote_renderer_.get();
+  if (remote_renderer && remote_renderer->image() != NULL &&
+      draw_area_ != NULL) {
+    width_ = remote_renderer->width();
+    height_ = remote_renderer->height();
+
+    if (!draw_buffer_.get()) {
+      // (width*2) * (height*2) * 4 bytes per ARGB pixel.
+      draw_buffer_size_ = (width_ * height_ * 4) * 4;
+      draw_buffer_.reset(new uint8_t[draw_buffer_size_]);
+      gtk_widget_set_size_request(draw_area_, width_ * 2, height_ * 2);
+    }
+
+    const uint32_t* image =
+        reinterpret_cast<const uint32_t*>(remote_renderer->image());
+    uint32_t* scaled = reinterpret_cast<uint32_t*>(draw_buffer_.get());
+    for (int r = 0; r < height_; ++r) {
+      // Horizontal doubling: write each source pixel into two columns.
+      for (int c = 0; c < width_; ++c) {
+        int x = c * 2;
+        scaled[x] = scaled[x + 1] = image[c];
+      }
+
+      // Vertical doubling: copy the just-written row into the next one.
+      uint32_t* prev_line = scaled;
+      scaled += width_ * 2;
+      memcpy(scaled, prev_line, (width_ * 2) * 4);
+
+      image += width_;
+      scaled += width_ * 2;
+    }
+
+    VideoRenderer* local_renderer = local_renderer_.get();
+    if (local_renderer && local_renderer->image()) {
+      image = reinterpret_cast<const uint32_t*>(local_renderer->image());
+      scaled = reinterpret_cast<uint32_t*>(draw_buffer_.get());
+      // Position the local preview on the right side.
+      scaled += (width_ * 2) - (local_renderer->width() / 2);
+      // right margin...
+      scaled -= 10;
+      // ... towards the bottom.
+      scaled += (height_ * width_ * 4) - ((local_renderer->height() / 2) *
+                                          (local_renderer->width() / 2) * 4);
+      // bottom margin...
+      scaled -= (width_ * 2) * 5;
+      // Half-size blit: sample every other pixel and every other row.
+      for (int r = 0; r < local_renderer->height(); r += 2) {
+        for (int c = 0; c < local_renderer->width(); c += 2) {
+          scaled[c / 2] = image[c + r * local_renderer->width()];
+        }
+        scaled += width_ * 2;
+      }
+    }
+
+    gtk_widget_queue_draw(draw_area_);
+  }
+
+  gdk_threads_leave();
+}
+
+// "draw" signal handler: paint draw_buffer_ (at 2x frame size) via a cairo
+// image surface. NOTE(review): assumes OnRedraw has populated draw_buffer_
+// before the first draw — confirm a draw cannot arrive earlier.
+void GtkMainWnd::Draw(GtkWidget* widget, cairo_t* cr) {
+  cairo_format_t format = CAIRO_FORMAT_ARGB32;
+  cairo_surface_t* surface = cairo_image_surface_create_for_data(
+      draw_buffer_.get(), format, width_ * 2, height_ * 2,
+      cairo_format_stride_for_width(format, width_ * 2));
+  cairo_set_source_surface(cr, surface, 0, 0);
+  cairo_rectangle(cr, 0, 0, width_ * 2, height_ * 2);
+  cairo_fill(cr);
+  cairo_surface_destroy(surface);
+}
+
+// Registers this renderer as a sink on the track; frames arrive via
+// OnFrame() from that point on.
+GtkMainWnd::VideoRenderer::VideoRenderer(
+    GtkMainWnd* main_wnd,
+    webrtc::VideoTrackInterface* track_to_render)
+    : width_(0),
+      height_(0),
+      main_wnd_(main_wnd),
+      rendered_track_(track_to_render) {
+  rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants());
+}
+
+// Unregister from the track so no frames are delivered after destruction.
+GtkMainWnd::VideoRenderer::~VideoRenderer() {
+  rendered_track_->RemoveSink(this);
+}
+
+// (Re)allocates the ARGB back buffer when the frame dimensions change.
+// Called from OnFrame; holds the GDK lock while touching image_.
+void GtkMainWnd::VideoRenderer::SetSize(int width, int height) {
+  gdk_threads_enter();
+
+  if (width_ == width && height_ == height) {
+    // BUGFIX: the original early return skipped gdk_threads_leave(),
+    // leaking the GDK lock acquired above. Release it before returning.
+    gdk_threads_leave();
+    return;
+  }
+
+  width_ = width;
+  height_ = height;
+  // 4 bytes per pixel (ARGB32 output of libyuv::I420ToARGB in OnFrame).
+  image_.reset(new uint8_t[width * height * 4]);
+  gdk_threads_leave();
+}
+
+// VideoSinkInterface callback: convert the incoming frame to ARGB in
+// image_ (rotating to upright first if needed) and schedule a repaint on
+// the GTK main loop. May be called from a non-UI thread, hence the GDK lock
+// and the g_idle_add hand-off instead of drawing directly.
+void GtkMainWnd::VideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) {
+  gdk_threads_enter();
+
+  rtc::scoped_refptr<webrtc::I420BufferInterface> buffer(
+      video_frame.video_frame_buffer()->ToI420());
+  if (video_frame.rotation() != webrtc::kVideoRotation_0) {
+    buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation());
+  }
+  SetSize(buffer->width(), buffer->height());
+
+  // TODO(bugs.webrtc.org/6857): This conversion is correct for little-endian
+  // only. Cairo ARGB32 treats pixels as 32-bit values in *native* byte order,
+  // with B in the least significant byte of the 32-bit value. Which on
+  // little-endian means that memory layout is BGRA, with the B byte stored at
+  // lowest address. Libyuv's ARGB format (surprisingly?) uses the same
+  // little-endian format, with B in the first byte in memory, regardless of
+  // native endianness.
+  libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(), buffer->DataU(),
+                     buffer->StrideU(), buffer->DataV(), buffer->StrideV(),
+                     image_.get(), width_ * 4, buffer->width(),
+                     buffer->height());
+
+  gdk_threads_leave();
+
+  g_idle_add(Redraw, main_wnd_);
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.h b/third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.h
new file mode 100644
index 0000000000..3b31e1be3b
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/linux/main_wnd.h
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+#define EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "examples/peerconnection/client/main_wnd.h"
+#include "examples/peerconnection/client/peer_connection_client.h"
+
+// Forward declarations.
+typedef struct _GtkWidget GtkWidget;
+typedef union _GdkEvent GdkEvent;
+typedef struct _GdkEventKey GdkEventKey;
+typedef struct _GtkTreeView GtkTreeView;
+typedef struct _GtkTreePath GtkTreePath;
+typedef struct _GtkTreeViewColumn GtkTreeViewColumn;
+typedef struct _cairo cairo_t;
+
+// Implements the main UI of the peer connection client.
+// This is functionally equivalent to the MainWnd class in the Windows
+// implementation.
+class GtkMainWnd : public MainWindow {
+ public:
+  GtkMainWnd(const char* server, int port, bool autoconnect, bool autocall);
+  ~GtkMainWnd();
+
+  // MainWindow implementation.
+  virtual void RegisterObserver(MainWndCallback* callback);
+  virtual bool IsWindow();
+  virtual void SwitchToConnectUI();
+  virtual void SwitchToPeerList(const Peers& peers);
+  virtual void SwitchToStreamingUI();
+  virtual void MessageBox(const char* caption, const char* text, bool is_error);
+  virtual MainWindow::UI current_ui();
+  virtual void StartLocalRenderer(webrtc::VideoTrackInterface* local_video);
+  virtual void StopLocalRenderer();
+  virtual void StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video);
+  virtual void StopRemoteRenderer();
+
+  virtual void QueueUIThreadCallback(int msg_id, void* data);
+
+  // Creates and shows the main window with the |Connect UI| enabled.
+  bool Create();
+
+  // Destroys the window. When the window is destroyed, it ends the
+  // main message loop.
+  bool Destroy();
+
+  // Callback for when the main window is destroyed.
+  void OnDestroyed(GtkWidget* widget, GdkEvent* event);
+
+  // Callback for when the user clicks the "Connect" button.
+  void OnClicked(GtkWidget* widget);
+
+  // Callback for keystrokes. Used to capture Esc and Return.
+  void OnKeyPress(GtkWidget* widget, GdkEventKey* key);
+
+  // Callback when the user double clicks a peer in order to initiate a
+  // connection.
+  void OnRowActivated(GtkTreeView* tree_view,
+                      GtkTreePath* path,
+                      GtkTreeViewColumn* column);
+
+  // Repaint handler; presumably scheduled on the GTK thread via g_idle_add
+  // from VideoRenderer::OnFrame — see main_wnd.cc.
+  void OnRedraw();
+
+  // Cairo draw callback for the drawing area widget.
+  void Draw(GtkWidget* widget, cairo_t* cr);
+
+ protected:
+  // Renders frames from a video track into an ARGB buffer
+  // (width()*height()*4 bytes) that the GTK thread paints from.
+  class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+   public:
+    VideoRenderer(GtkMainWnd* main_wnd,
+                  webrtc::VideoTrackInterface* track_to_render);
+    virtual ~VideoRenderer();
+
+    // VideoSinkInterface implementation
+    void OnFrame(const webrtc::VideoFrame& frame) override;
+
+    // Latest converted frame, or null before the first frame arrives.
+    const uint8_t* image() const { return image_.get(); }
+
+    int width() const { return width_; }
+
+    int height() const { return height_; }
+
+   protected:
+    void SetSize(int width, int height);
+    std::unique_ptr<uint8_t[]> image_;
+    int width_;
+    int height_;
+    GtkMainWnd* main_wnd_;
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
+  };
+
+ protected:
+  GtkWidget* window_;     // Our main window.
+  GtkWidget* draw_area_;  // The drawing surface for rendering video streams.
+  GtkWidget* vbox_;       // Container for the Connect UI.
+  GtkWidget* server_edit_;
+  GtkWidget* port_edit_;
+  GtkWidget* peer_list_;  // The list of peers.
+  MainWndCallback* callback_;
+  std::string server_;
+  std::string port_;
+  bool autoconnect_;
+  bool autocall_;
+  std::unique_ptr<VideoRenderer> local_renderer_;
+  std::unique_ptr<VideoRenderer> remote_renderer_;
+  int width_;
+  int height_;
+  std::unique_ptr<uint8_t[]> draw_buffer_;
+  int draw_buffer_size_;
+};
+
+#endif // EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/client/main.cc b/third_party/libwebrtc/examples/peerconnection/client/main.cc
new file mode 100644
index 0000000000..32bc52bda4
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/main.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// clang-format off
+// clang formating would change include order.
+#include <windows.h>
+#include <shellapi.h> // must come after windows.h
+// clang-format on
+
+#include <string>
+#include <vector>
+
+#include "absl/flags/parse.h"
+#include "examples/peerconnection/client/conductor.h"
+#include "examples/peerconnection/client/flag_defs.h"
+#include "examples/peerconnection/client/main_wnd.h"
+#include "examples/peerconnection/client/peer_connection_client.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ssl_adapter.h"
+#include "rtc_base/string_utils.h" // For ToUtf8
+#include "rtc_base/win32_socket_init.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+
+namespace {
+// A helper class to translate Windows command line arguments into UTF8,
+// which then allows us to just pass them to the flags system.
+// This encapsulates all the work of getting the command line and translating
+// it to an array of 8-bit strings; all you have to do is create one of these,
+// and then call argc() and argv().
+class WindowsCommandLineArguments {
+ public:
+  WindowsCommandLineArguments();
+
+  // Non-copyable: argv_ stores pointers into args_, which a default copy
+  // would leave pointing at the source object's strings. The deleted
+  // copy-assignment now takes the conventional const& (the original took a
+  // non-const reference; still deleted, but unidiomatic).
+  WindowsCommandLineArguments(const WindowsCommandLineArguments&) = delete;
+  WindowsCommandLineArguments& operator=(const WindowsCommandLineArguments&) =
+      delete;
+
+  // Number of arguments, including the program name.
+  int argc() { return argv_.size(); }
+  // Argument vector, layout-compatible with a char** argv.
+  char** argv() { return argv_.data(); }
+
+ private:
+  // Owned argument strings.
+  std::vector<std::string> args_;
+  // Pointers, to get layout compatible with char** argv.
+  std::vector<char*> argv_;
+};
+
+// Fetches the process command line, splits it with CommandLineToArgvW and
+// converts each argument to UTF-8, building an argc/argv pair.
+WindowsCommandLineArguments::WindowsCommandLineArguments() {
+  // start by getting the command line.
+  LPCWSTR command_line = ::GetCommandLineW();
+  // now, convert it to a list of wide char strings.
+  int argc = 0;
+  LPWSTR* wide_argv = ::CommandLineToArgvW(command_line, &argc);
+  if (!wide_argv) {
+    // Conversion failed; leave argc()/argv() empty.
+    return;
+  }
+
+  // Reserve up front: growing args_ would reallocate and move the strings,
+  // and (with small-string optimization) invalidate the c_str() pointers
+  // already stored in argv_, leaving them dangling.
+  args_.reserve(argc);
+  argv_.reserve(argc);
+
+  // iterate over the returned wide strings;
+  for (int i = 0; i < argc; ++i) {
+    args_.push_back(rtc::ToUtf8(wide_argv[i], wcslen(wide_argv[i])));
+    // make sure the argv array points to the string data.
+    argv_.push_back(const_cast<char*>(args_.back().c_str()));
+  }
+  LocalFree(wide_argv);
+}
+
+} // namespace
+// Win32 GUI entry point: parses flags, creates the UI, then pumps Windows
+// messages until the window closes, plus a drain loop while a call or
+// server connection is still being torn down.
+int PASCAL wWinMain(HINSTANCE instance,
+                    HINSTANCE prev_instance,
+                    wchar_t* cmd_line,
+                    int cmd_show) {
+  rtc::WinsockInitializer winsock_init;
+  rtc::PhysicalSocketServer ss;
+  rtc::AutoSocketServerThread main_thread(&ss);
+
+  // Translate the Windows command line to UTF-8 argc/argv for absl flags.
+  WindowsCommandLineArguments win_args;
+  int argc = win_args.argc();
+  char** argv = win_args.argv();
+
+  absl::ParseCommandLine(argc, argv);
+
+  // InitFieldTrialsFromString stores the char*, so the char array must outlive
+  // the application.
+  const std::string forced_field_trials =
+      absl::GetFlag(FLAGS_force_fieldtrials);
+  webrtc::field_trial::InitFieldTrialsFromString(forced_field_trials.c_str());
+
+  // Abort if the user specifies a port that is outside the allowed
+  // range [1, 65535].
+  if ((absl::GetFlag(FLAGS_port) < 1) || (absl::GetFlag(FLAGS_port) > 65535)) {
+    printf("Error: %i is not a valid port.\n", absl::GetFlag(FLAGS_port));
+    return -1;
+  }
+
+  const std::string server = absl::GetFlag(FLAGS_server);
+  MainWnd wnd(server.c_str(), absl::GetFlag(FLAGS_port),
+              absl::GetFlag(FLAGS_autoconnect), absl::GetFlag(FLAGS_autocall));
+  if (!wnd.Create()) {
+    RTC_DCHECK_NOTREACHED();
+    return -1;
+  }
+
+  rtc::InitializeSSL();
+  PeerConnectionClient client;
+  // Conductor wires the signaling client to the window (ref-counted).
+  auto conductor = rtc::make_ref_counted<Conductor>(&client, &wnd);
+
+  // Main loop.
+  MSG msg;
+  BOOL gm;
+  while ((gm = ::GetMessage(&msg, NULL, 0, 0)) != 0 && gm != -1) {
+    if (!wnd.PreTranslateMessage(&msg)) {
+      ::TranslateMessage(&msg);
+      ::DispatchMessage(&msg);
+    }
+  }
+
+  // If a call or server connection is still active after the window went
+  // away, keep pumping messages until the teardown completes.
+  if (conductor->connection_active() || client.is_connected()) {
+    while ((conductor->connection_active() || client.is_connected()) &&
+           (gm = ::GetMessage(&msg, NULL, 0, 0)) != 0 && gm != -1) {
+      if (!wnd.PreTranslateMessage(&msg)) {
+        ::TranslateMessage(&msg);
+        ::DispatchMessage(&msg);
+      }
+    }
+  }
+
+  rtc::CleanupSSL();
+  return 0;
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/main_wnd.cc b/third_party/libwebrtc/examples/peerconnection/client/main_wnd.cc
new file mode 100644
index 0000000000..afafa621b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/main_wnd.cc
@@ -0,0 +1,633 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/client/main_wnd.h"
+
+#include <math.h>
+
+#include "api/video/i420_buffer.h"
+#include "examples/peerconnection/client/defaults.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "third_party/libyuv/include/libyuv/convert_argb.h"
+
+// Window-class atom shared by all MainWnd instances; registered lazily in
+// RegisterWindowClass().
+ATOM MainWnd::wnd_class_ = 0;
+const wchar_t MainWnd::kClassName[] = L"WebRTC_MainWnd";
+
+namespace {
+
+// Status strings painted in the video area before media arrives.
+const char kConnecting[] = "Connecting... ";
+const char kNoVideoStreams[] = "(no video streams either way)";
+const char kNoIncomingStream[] = "(no incoming video)";
+
+// Computes the outer window size needed so the client area can hold `text`
+// on a single line, including the current frame/border overhead.
+void CalculateWindowSizeForText(HWND wnd,
+                                const wchar_t* text,
+                                size_t* width,
+                                size_t* height) {
+  HDC dc = ::GetDC(wnd);
+  RECT text_rc = {0};
+  // DT_CALCRECT only measures; nothing is drawn.
+  ::DrawTextW(dc, text, -1, &text_rc, DT_CALCRECT | DT_SINGLELINE);
+  ::ReleaseDC(wnd, dc);
+  RECT client, window;
+  ::GetClientRect(wnd, &client);
+  ::GetWindowRect(wnd, &window);
+
+  *width = text_rc.right - text_rc.left;
+  *width += (window.right - window.left) - (client.right - client.left);
+  *height = text_rc.bottom - text_rc.top;
+  *height += (window.bottom - window.top) - (client.bottom - client.top);
+}
+
+// Lazily-fetched default GUI font, shared by all child windows.
+HFONT GetDefaultFont() {
+  static HFONT font = reinterpret_cast<HFONT>(GetStockObject(DEFAULT_GUI_FONT));
+  return font;
+}
+
+// Reads a window's text as ANSI. Truncates at MAX_PATH characters, which
+// is ample for the short edit fields used here.
+std::string GetWindowText(HWND wnd) {
+  char text[MAX_PATH] = {0};
+  ::GetWindowTextA(wnd, &text[0], ARRAYSIZE(text));
+  return text;
+}
+
+// Appends `str` to a list box and tags the new row with `item_data`.
+void AddListBoxItem(HWND listbox, const std::string& str, LPARAM item_data) {
+  LRESULT index = ::SendMessageA(listbox, LB_ADDSTRING, 0,
+                                 reinterpret_cast<LPARAM>(str.c_str()));
+  ::SendMessageA(listbox, LB_SETITEMDATA, index, item_data);
+}
+
+}  // namespace
+
+// Stores the connection parameters; the window itself is created later in
+// Create().
+MainWnd::MainWnd(const char* server,
+                 int port,
+                 bool auto_connect,
+                 bool auto_call)
+    : ui_(CONNECT_TO_SERVER),
+      wnd_(NULL),
+      edit1_(NULL),
+      edit2_(NULL),
+      label1_(NULL),
+      label2_(NULL),
+      button_(NULL),
+      listbox_(NULL),
+      destroyed_(false),
+      nested_msg_(NULL),
+      callback_(NULL),
+      server_(server),
+      auto_connect_(auto_connect),
+      auto_call_(auto_call) {
+  // std::to_string cannot truncate; the previous 10-byte snprintf buffer
+  // could not hold every int (e.g. INT_MIN needs 11 chars plus NUL).
+  port_ = std::to_string(port);
+}
+
+// The window must already be gone (Destroy() or WM_NCDESTROY) by the time
+// the destructor runs; enforced with a debug check.
+MainWnd::~MainWnd() {
+  RTC_DCHECK(!IsWindow());
+}
+
+// Creates the top-level window, applies the default GUI font, builds the
+// child controls and shows the Connect UI. Returns false if the window
+// class could not be registered or the window was not created.
+bool MainWnd::Create() {
+  RTC_DCHECK(wnd_ == NULL);
+  if (!RegisterWindowClass())
+    return false;
+
+  // Remember the creating thread so QueueUIThreadCallback() can post back.
+  ui_thread_id_ = ::GetCurrentThreadId();
+  wnd_ =
+      ::CreateWindowExW(WS_EX_OVERLAPPEDWINDOW, kClassName, L"WebRTC",
+                        WS_OVERLAPPEDWINDOW | WS_VISIBLE | WS_CLIPCHILDREN,
+                        CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
+                        CW_USEDEFAULT, NULL, NULL, GetModuleHandle(NULL), this);
+
+  ::SendMessage(wnd_, WM_SETFONT, reinterpret_cast<WPARAM>(GetDefaultFont()),
+                TRUE);
+
+  CreateChildWindows();
+  SwitchToConnectUI();
+
+  return wnd_ != NULL;
+}
+
+// Destroys the top-level window. Returns true only if a live window
+// existed and DestroyWindow succeeded.
+bool MainWnd::Destroy() {
+  // Nothing to tear down if the window was never created or already died.
+  if (!IsWindow())
+    return false;
+  return ::DestroyWindow(wnd_) != FALSE;
+}
+
+// Registers the (single) observer that receives UI events; the rest of the
+// class checks callback_ for NULL before most uses.
+void MainWnd::RegisterObserver(MainWndCallback* callback) {
+  callback_ = callback;
+}
+
+// True while a live top-level window exists for this instance.
+bool MainWnd::IsWindow() {
+  // A null handle means the window was never created or already destroyed.
+  if (wnd_ == NULL)
+    return false;
+  return ::IsWindow(wnd_) != FALSE;
+}
+
+// Filters messages before dispatch: Tab cycles focus, Return triggers the
+// default action, Esc hangs up / disconnects, and thread messages posted
+// by QueueUIThreadCallback() are routed to the observer. Returns true when
+// the message was consumed.
+bool MainWnd::PreTranslateMessage(MSG* msg) {
+  bool ret = false;
+  if (msg->message == WM_CHAR) {
+    if (msg->wParam == VK_TAB) {
+      HandleTabbing();
+      ret = true;
+    } else if (msg->wParam == VK_RETURN) {
+      OnDefaultAction();
+      ret = true;
+    } else if (msg->wParam == VK_ESCAPE) {
+      if (callback_) {
+        if (ui_ == STREAMING) {
+          callback_->DisconnectFromCurrentPeer();
+        } else {
+          callback_->DisconnectFromServer();
+        }
+      }
+    }
+  } else if (msg->hwnd == NULL && msg->message == UI_THREAD_CALLBACK) {
+    // NOTE(review): callback_ is dereferenced without a null check here,
+    // unlike the key handling above — confirm an observer is always
+    // registered before callbacks are queued.
+    callback_->UIThreadCallback(static_cast<int>(msg->wParam),
+                                reinterpret_cast<void*>(msg->lParam));
+    ret = true;
+  }
+  return ret;
+}
+
+// Shows the server/port entry controls (hiding the peer list) and, when
+// auto-connect was requested, simulates a click on the Connect button.
+void MainWnd::SwitchToConnectUI() {
+  RTC_DCHECK(IsWindow());
+  LayoutPeerListUI(false);
+  ui_ = CONNECT_TO_SERVER;
+  LayoutConnectUI(true);
+  ::SetFocus(edit1_);
+
+  if (auto_connect_)
+    ::PostMessage(button_, BM_CLICK, 0, 0);
+}
+
+// Repopulates and shows the peer list. In auto-call mode, selects the last
+// peer and simulates a double-click to start the call immediately.
+void MainWnd::SwitchToPeerList(const Peers& peers) {
+  LayoutConnectUI(false);
+
+  ::SendMessage(listbox_, LB_RESETCONTENT, 0, 0);
+
+  // Header row is tagged -1 so OnDefaultAction() can ignore it.
+  AddListBoxItem(listbox_, "List of currently connected peers:", -1);
+  Peers::const_iterator i = peers.begin();
+  for (; i != peers.end(); ++i)
+    AddListBoxItem(listbox_, i->second.c_str(), i->first);
+
+  ui_ = LIST_PEERS;
+  LayoutPeerListUI(true);
+  ::SetFocus(listbox_);
+
+  if (auto_call_ && peers.begin() != peers.end()) {
+    // Get the number of items in the list
+    LRESULT count = ::SendMessage(listbox_, LB_GETCOUNT, 0, 0);
+    if (count != LB_ERR) {
+      // Select the last item in the list
+      LRESULT selection = ::SendMessage(listbox_, LB_SETCURSEL, count - 1, 0);
+      // Post a fake LBN_DBLCLK so OnMessage() runs the default action.
+      if (selection != LB_ERR)
+        ::PostMessage(wnd_, WM_COMMAND,
+                      MAKEWPARAM(GetDlgCtrlID(listbox_), LBN_DBLCLK),
+                      reinterpret_cast<LPARAM>(listbox_));
+    }
+  }
+}
+
+// Hides all controls; in STREAMING state OnPaint() draws the video frames
+// directly into the client area.
+void MainWnd::SwitchToStreamingUI() {
+  LayoutConnectUI(false);
+  LayoutPeerListUI(false);
+  ui_ = STREAMING;
+}
+
+void MainWnd::MessageBox(const char* caption, const char* text, bool is_error) {
+ DWORD flags = MB_OK;
+ if (is_error)
+ flags |= MB_ICONERROR;
+
+ ::MessageBoxA(handle(), text, caption, flags);
+}
+
+// Begins rendering the local capture track; the 1x1 initial size is grown
+// to the real frame size by VideoRenderer::SetSize() on the first frame.
+void MainWnd::StartLocalRenderer(webrtc::VideoTrackInterface* local_video) {
+  local_renderer_.reset(new VideoRenderer(handle(), 1, 1, local_video));
+}
+
+// Drops the local renderer; its destructor detaches it from the track.
+void MainWnd::StopLocalRenderer() {
+  local_renderer_.reset();
+}
+
+// Begins rendering the remote track; sized on the first incoming frame.
+void MainWnd::StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video) {
+  remote_renderer_.reset(new VideoRenderer(handle(), 1, 1, remote_video));
+}
+
+// Drops the remote renderer; its destructor detaches it from the track.
+void MainWnd::StopRemoteRenderer() {
+  remote_renderer_.reset();
+}
+
+// Posts a message to the UI thread's queue; delivered back to the observer
+// through PreTranslateMessage() as a UI_THREAD_CALLBACK.
+void MainWnd::QueueUIThreadCallback(int msg_id, void* data) {
+  ::PostThreadMessage(ui_thread_id_, UI_THREAD_CALLBACK,
+                      static_cast<WPARAM>(msg_id),
+                      reinterpret_cast<LPARAM>(data));
+}
+
+// WM_PAINT handler. While streaming, blits the remote frame scaled to the
+// client area with a local-preview thumbnail in the corner; otherwise
+// paints a plain background (or a status line while waiting for video).
+void MainWnd::OnPaint() {
+  PAINTSTRUCT ps;
+  ::BeginPaint(handle(), &ps);
+
+  RECT rc;
+  ::GetClientRect(handle(), &rc);
+
+  VideoRenderer* local_renderer = local_renderer_.get();
+  VideoRenderer* remote_renderer = remote_renderer_.get();
+  if (ui_ == STREAMING && remote_renderer && local_renderer) {
+    // Hold both buffer locks so OnFrame() can't swap the images mid-paint.
+    AutoLock<VideoRenderer> local_lock(local_renderer);
+    AutoLock<VideoRenderer> remote_lock(remote_renderer);
+
+    const BITMAPINFO& bmi = remote_renderer->bmi();
+    // biHeight is negative (top-down DIB), so take its magnitude.
+    int height = abs(bmi.bmiHeader.biHeight);
+    int width = bmi.bmiHeader.biWidth;
+
+    const uint8_t* image = remote_renderer->image();
+    if (image != NULL) {
+      // Double-buffer: compose into a memory DC, then BitBlt once at the end.
+      HDC dc_mem = ::CreateCompatibleDC(ps.hdc);
+      ::SetStretchBltMode(dc_mem, HALFTONE);
+
+      // Set the map mode so that the ratio will be maintained for us.
+      HDC all_dc[] = {ps.hdc, dc_mem};
+      for (size_t i = 0; i < arraysize(all_dc); ++i) {
+        SetMapMode(all_dc[i], MM_ISOTROPIC);
+        SetWindowExtEx(all_dc[i], width, height, NULL);
+        SetViewportExtEx(all_dc[i], rc.right, rc.bottom, NULL);
+      }
+
+      HBITMAP bmp_mem = ::CreateCompatibleBitmap(ps.hdc, rc.right, rc.bottom);
+      HGDIOBJ bmp_old = ::SelectObject(dc_mem, bmp_mem);
+
+      POINT logical_area = {rc.right, rc.bottom};
+      DPtoLP(ps.hdc, &logical_area, 1);
+
+      // Clear to black, then center the remote frame.
+      HBRUSH brush = ::CreateSolidBrush(RGB(0, 0, 0));
+      RECT logical_rect = {0, 0, logical_area.x, logical_area.y};
+      ::FillRect(dc_mem, &logical_rect, brush);
+      ::DeleteObject(brush);
+
+      int x = (logical_area.x / 2) - (width / 2);
+      int y = (logical_area.y / 2) - (height / 2);
+
+      StretchDIBits(dc_mem, x, y, width, height, 0, 0, width, height, image,
+                    &bmi, DIB_RGB_COLORS, SRCCOPY);
+
+      // Only draw the local preview thumbnail in a reasonably large window.
+      if ((rc.right - rc.left) > 200 && (rc.bottom - rc.top) > 200) {
+        const BITMAPINFO& bmi = local_renderer->bmi();
+        image = local_renderer->image();
+        int thumb_width = bmi.bmiHeader.biWidth / 4;
+        int thumb_height = abs(bmi.bmiHeader.biHeight) / 4;
+        StretchDIBits(dc_mem, logical_area.x - thumb_width - 10,
+                      logical_area.y - thumb_height - 10, thumb_width,
+                      thumb_height, 0, 0, bmi.bmiHeader.biWidth,
+                      -bmi.bmiHeader.biHeight, image, &bmi, DIB_RGB_COLORS,
+                      SRCCOPY);
+      }
+
+      BitBlt(ps.hdc, 0, 0, logical_area.x, logical_area.y, dc_mem, 0, 0,
+             SRCCOPY);
+
+      // Cleanup.
+      ::SelectObject(dc_mem, bmp_old);
+      ::DeleteObject(bmp_mem);
+      ::DeleteDC(dc_mem);
+    } else {
+      // We're still waiting for the video stream to be initialized.
+      HBRUSH brush = ::CreateSolidBrush(RGB(0, 0, 0));
+      ::FillRect(ps.hdc, &rc, brush);
+      ::DeleteObject(brush);
+
+      HGDIOBJ old_font = ::SelectObject(ps.hdc, GetDefaultFont());
+      ::SetTextColor(ps.hdc, RGB(0xff, 0xff, 0xff));
+      ::SetBkMode(ps.hdc, TRANSPARENT);
+
+      std::string text(kConnecting);
+      if (!local_renderer->image()) {
+        text += kNoVideoStreams;
+      } else {
+        text += kNoIncomingStream;
+      }
+      ::DrawTextA(ps.hdc, text.c_str(), -1, &rc,
+                  DT_SINGLELINE | DT_CENTER | DT_VCENTER);
+      ::SelectObject(ps.hdc, old_font);
+    }
+  } else {
+    // Not streaming: plain system window background.
+    HBRUSH brush = ::CreateSolidBrush(::GetSysColor(COLOR_WINDOW));
+    ::FillRect(ps.hdc, &rc, brush);
+    ::DeleteObject(brush);
+  }
+
+  ::EndPaint(handle(), &ps);
+}
+
+// Ends the message loop once the top-level window has been destroyed.
+void MainWnd::OnDestroyed() {
+  PostQuitMessage(0);
+}
+
+// Runs the action for the current UI state: login from the connect screen,
+// connect-to-peer from the peer list.
+void MainWnd::OnDefaultAction() {
+  if (!callback_)
+    return;
+  if (ui_ == CONNECT_TO_SERVER) {
+    std::string server(GetWindowText(edit1_));
+    std::string port_str(GetWindowText(edit2_));
+    // An empty port field yields port 0.
+    int port = port_str.length() ? atoi(port_str.c_str()) : 0;
+    callback_->StartLogin(server, port);
+  } else if (ui_ == LIST_PEERS) {
+    LRESULT sel = ::SendMessage(listbox_, LB_GETCURSEL, 0, 0);
+    if (sel != LB_ERR) {
+      LRESULT peer_id = ::SendMessage(listbox_, LB_GETITEMDATA, sel, 0);
+      // -1 tags the non-selectable header row added in SwitchToPeerList().
+      if (peer_id != -1 && callback_) {
+        callback_->ConnectToPeer(peer_id);
+      }
+    }
+  } else {
+    ::MessageBoxA(wnd_, "OK!", "Yeah", MB_OK);
+  }
+}
+
+// Window-procedure message handling. Returns true when the message was
+// fully handled (WndProc then skips DefWindowProc).
+bool MainWnd::OnMessage(UINT msg, WPARAM wp, LPARAM lp, LRESULT* result) {
+  switch (msg) {
+    case WM_ERASEBKGND:
+      // OnPaint() covers the whole client area, so skip background erase
+      // to avoid flicker.
+      *result = TRUE;
+      return true;
+
+    case WM_PAINT:
+      OnPaint();
+      return true;
+
+    case WM_SETFOCUS:
+      // Forward focus to the control that matters for the current UI.
+      if (ui_ == CONNECT_TO_SERVER) {
+        SetFocus(edit1_);
+      } else if (ui_ == LIST_PEERS) {
+        SetFocus(listbox_);
+      }
+      return true;
+
+    case WM_SIZE:
+      if (ui_ == CONNECT_TO_SERVER) {
+        LayoutConnectUI(true);
+      } else if (ui_ == LIST_PEERS) {
+        LayoutPeerListUI(true);
+      }
+      break;
+
+    case WM_CTLCOLORSTATIC:
+      // Paint static labels on the standard window background color.
+      *result = reinterpret_cast<LRESULT>(GetSysColorBrush(COLOR_WINDOW));
+      return true;
+
+    case WM_COMMAND:
+      // Button click and list double-click both trigger the default action.
+      if (button_ == reinterpret_cast<HWND>(lp)) {
+        if (BN_CLICKED == HIWORD(wp))
+          OnDefaultAction();
+      } else if (listbox_ == reinterpret_cast<HWND>(lp)) {
+        if (LBN_DBLCLK == HIWORD(wp)) {
+          OnDefaultAction();
+        }
+      }
+      return true;
+
+    case WM_CLOSE:
+      if (callback_)
+        callback_->Close();
+      break;
+  }
+  return false;
+}
+
+// static
+// Trampoline window procedure: recovers the MainWnd instance stashed in
+// GWLP_USERDATA at WM_CREATE time and forwards messages to OnMessage().
+LRESULT CALLBACK MainWnd::WndProc(HWND hwnd, UINT msg, WPARAM wp, LPARAM lp) {
+  MainWnd* me =
+      reinterpret_cast<MainWnd*>(::GetWindowLongPtr(hwnd, GWLP_USERDATA));
+  if (!me && WM_CREATE == msg) {
+    CREATESTRUCT* cs = reinterpret_cast<CREATESTRUCT*>(lp);
+    me = reinterpret_cast<MainWnd*>(cs->lpCreateParams);
+    me->wnd_ = hwnd;
+    ::SetWindowLongPtr(hwnd, GWLP_USERDATA, reinterpret_cast<LONG_PTR>(me));
+  }
+
+  LRESULT result = 0;
+  if (me) {
+    // Track re-entrancy so OnDestroyed() only fires once the outermost
+    // nested message has unwound.
+    void* prev_nested_msg = me->nested_msg_;
+    me->nested_msg_ = &msg;
+
+    bool handled = me->OnMessage(msg, wp, lp, &result);
+    if (WM_NCDESTROY == msg) {
+      me->destroyed_ = true;
+    } else if (!handled) {
+      result = ::DefWindowProc(hwnd, msg, wp, lp);
+    }
+
+    if (me->destroyed_ && prev_nested_msg == NULL) {
+      me->OnDestroyed();
+      me->wnd_ = NULL;
+      me->destroyed_ = false;
+    }
+
+    me->nested_msg_ = prev_nested_msg;
+  } else {
+    result = ::DefWindowProc(hwnd, msg, wp, lp);
+  }
+
+  return result;
+}
+
+// static
+// Registers the shared window class on first use; later calls reuse the
+// cached atom.
+bool MainWnd::RegisterWindowClass() {
+  if (wnd_class_)
+    return true;
+
+  WNDCLASSEXW wcex = {sizeof(WNDCLASSEX)};
+  wcex.style = CS_DBLCLKS;
+  wcex.hInstance = GetModuleHandle(NULL);
+  wcex.hbrBackground = reinterpret_cast<HBRUSH>(COLOR_WINDOW + 1);
+  wcex.hCursor = ::LoadCursor(NULL, IDC_ARROW);
+  wcex.lpfnWndProc = &WndProc;
+  wcex.lpszClassName = kClassName;
+  wnd_class_ = ::RegisterClassExW(&wcex);
+  RTC_DCHECK(wnd_class_ != 0);
+  return wnd_class_ != 0;
+}
+
+// Creates one child control with the shared default font. No-op when the
+// control already exists.
+void MainWnd::CreateChildWindow(HWND* wnd,
+                                MainWnd::ChildWindowID id,
+                                const wchar_t* class_name,
+                                DWORD control_style,
+                                DWORD ex_style) {
+  if (::IsWindow(*wnd))
+    return;
+
+  // Child windows are invisible at first, and shown after being resized.
+  DWORD style = WS_CHILD | control_style;
+  *wnd = ::CreateWindowExW(ex_style, class_name, L"", style, 100, 100, 100, 100,
+                           wnd_, reinterpret_cast<HMENU>(id),
+                           GetModuleHandle(NULL), NULL);
+  RTC_DCHECK(::IsWindow(*wnd) != FALSE);
+  ::SendMessage(*wnd, WM_SETFONT, reinterpret_cast<WPARAM>(GetDefaultFont()),
+                TRUE);
+}
+
+// Builds all child controls and seeds the server/port fields with the
+// values passed to the constructor.
+void MainWnd::CreateChildWindows() {
+  // Create the child windows in tab order.
+  CreateChildWindow(&label1_, LABEL1_ID, L"Static", ES_CENTER | ES_READONLY, 0);
+  CreateChildWindow(&edit1_, EDIT_ID, L"Edit",
+                    ES_LEFT | ES_NOHIDESEL | WS_TABSTOP, WS_EX_CLIENTEDGE);
+  CreateChildWindow(&label2_, LABEL2_ID, L"Static", ES_CENTER | ES_READONLY, 0);
+  // NOTE(review): both edit controls use EDIT_ID. The ids are not used for
+  // lookups in this file, but distinct ids would be cleaner.
+  CreateChildWindow(&edit2_, EDIT_ID, L"Edit",
+                    ES_LEFT | ES_NOHIDESEL | WS_TABSTOP, WS_EX_CLIENTEDGE);
+  CreateChildWindow(&button_, BUTTON_ID, L"Button", BS_CENTER | WS_TABSTOP, 0);
+
+  CreateChildWindow(&listbox_, LISTBOX_ID, L"ListBox",
+                    LBS_HASSTRINGS | LBS_NOTIFY, WS_EX_CLIENTEDGE);
+
+  ::SetWindowTextA(edit1_, server_.c_str());
+  ::SetWindowTextA(edit2_, port_.c_str());
+}
+
+// Lays out (or hides) the connect controls as one horizontal row centered
+// in the client area.
+void MainWnd::LayoutConnectUI(bool show) {
+  struct Windows {
+    HWND wnd;
+    const wchar_t* text;
+    size_t width;
+    size_t height;
+  } windows[] = {
+      {label1_, L"Server"}, {edit1_, L"XXXyyyYYYgggXXXyyyYYYggg"},
+      {label2_, L":"}, {edit2_, L"XyXyX"},
+      {button_, L"Connect"},
+  };
+
+  if (show) {
+    const size_t kSeparator = 5;
+    size_t total_width = (ARRAYSIZE(windows) - 1) * kSeparator;
+
+    // Measure every control. Strings starting with 'X' are sizing
+    // placeholders only; they are never set as the control's text below.
+    for (size_t i = 0; i < ARRAYSIZE(windows); ++i) {
+      CalculateWindowSizeForText(windows[i].wnd, windows[i].text,
+                                 &windows[i].width, &windows[i].height);
+      total_width += windows[i].width;
+    }
+
+    RECT rc;
+    ::GetClientRect(wnd_, &rc);
+    // Center the row horizontally and vertically.
+    size_t x = (rc.right / 2) - (total_width / 2);
+    size_t y = rc.bottom / 2;
+    for (size_t i = 0; i < ARRAYSIZE(windows); ++i) {
+      size_t top = y - (windows[i].height / 2);
+      ::MoveWindow(windows[i].wnd, static_cast<int>(x), static_cast<int>(top),
+                   static_cast<int>(windows[i].width),
+                   static_cast<int>(windows[i].height), TRUE);
+      x += kSeparator + windows[i].width;
+      if (windows[i].text[0] != 'X')
+        ::SetWindowTextW(windows[i].wnd, windows[i].text);
+      ::ShowWindow(windows[i].wnd, SW_SHOWNA);
+    }
+  } else {
+    for (size_t i = 0; i < ARRAYSIZE(windows); ++i) {
+      ::ShowWindow(windows[i].wnd, SW_HIDE);
+    }
+  }
+}
+
+// Shows the peer list filling the whole client area, or hides it and
+// forces a repaint of the vacated region.
+void MainWnd::LayoutPeerListUI(bool show) {
+  if (show) {
+    RECT rc;
+    ::GetClientRect(wnd_, &rc);
+    ::MoveWindow(listbox_, 0, 0, rc.right, rc.bottom, TRUE);
+    ::ShowWindow(listbox_, SW_SHOWNA);
+  } else {
+    ::ShowWindow(listbox_, SW_HIDE);
+    InvalidateRect(wnd_, NULL, TRUE);
+  }
+}
+
+// Moves focus to the next/previous visible WS_TABSTOP sibling (Shift
+// reverses direction), wrapping at the ends.
+void MainWnd::HandleTabbing() {
+  bool shift = ((::GetAsyncKeyState(VK_SHIFT) & 0x8000) != 0);
+  UINT next_cmd = shift ? GW_HWNDPREV : GW_HWNDNEXT;
+  UINT loop_around_cmd = shift ? GW_HWNDLAST : GW_HWNDFIRST;
+  HWND focus = GetFocus(), next;
+  // NOTE(review): if no visible tab-stop sibling exists this loop never
+  // terminates — appears to rely on at least one control always being
+  // visible; confirm.
+  do {
+    next = ::GetWindow(focus, next_cmd);
+    if (IsWindowVisible(next) &&
+        (GetWindowLong(next, GWL_STYLE) & WS_TABSTOP)) {
+      break;
+    }
+
+    if (!next) {
+      // Hit the end of the sibling chain; wrap to the other end.
+      next = ::GetWindow(focus, loop_around_cmd);
+      if (IsWindowVisible(next) &&
+          (GetWindowLong(next, GWL_STYLE) & WS_TABSTOP)) {
+        break;
+      }
+    }
+    focus = next;
+  } while (true);
+  ::SetFocus(next);
+}
+
+//
+// MainWnd::VideoRenderer
+//
+
+// Attaches this renderer as a sink on `track_to_render` and initializes a
+// 32-bit DIB header at the given size (resized on the first real frame).
+MainWnd::VideoRenderer::VideoRenderer(
+    HWND wnd,
+    int width,
+    int height,
+    webrtc::VideoTrackInterface* track_to_render)
+    : wnd_(wnd), rendered_track_(track_to_render) {
+  ::InitializeCriticalSection(&buffer_lock_);
+  ZeroMemory(&bmi_, sizeof(bmi_));
+  bmi_.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+  bmi_.bmiHeader.biPlanes = 1;
+  bmi_.bmiHeader.biBitCount = 32;
+  bmi_.bmiHeader.biCompression = BI_RGB;
+  bmi_.bmiHeader.biWidth = width;
+  // Negative height marks a top-down DIB (row 0 at the top).
+  bmi_.bmiHeader.biHeight = -height;
+  bmi_.bmiHeader.biSizeImage =
+      width * height * (bmi_.bmiHeader.biBitCount >> 3);
+  rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants());
+}
+
+// Detach from the track first so no OnFrame() can race the critical
+// section teardown.
+MainWnd::VideoRenderer::~VideoRenderer() {
+  rendered_track_->RemoveSink(this);
+  ::DeleteCriticalSection(&buffer_lock_);
+}
+
+// Resizes the DIB header and frame buffer when the frame dimensions change.
+// Called under the buffer lock via OnFrame().
+void MainWnd::VideoRenderer::SetSize(int width, int height) {
+  AutoLock<VideoRenderer> lock(this);
+
+  // biHeight is stored negated (top-down DIB), so the incoming positive
+  // height must be compared against its negation. The original compared
+  // `height == biHeight`, which never matched and therefore reallocated
+  // the frame buffer on every call.
+  if (width == bmi_.bmiHeader.biWidth && height == -bmi_.bmiHeader.biHeight) {
+    return;
+  }
+
+  bmi_.bmiHeader.biWidth = width;
+  bmi_.bmiHeader.biHeight = -height;
+  bmi_.bmiHeader.biSizeImage =
+      width * height * (bmi_.bmiHeader.biBitCount >> 3);
+  image_.reset(new uint8_t[bmi_.bmiHeader.biSizeImage]);
+}
+
+// Sink callback: converts the incoming I420 frame to 32-bit ARGB under the
+// buffer lock, then invalidates the window so OnPaint() repaints.
+void MainWnd::VideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) {
+  {
+    AutoLock<VideoRenderer> lock(this);
+
+    rtc::scoped_refptr<webrtc::I420BufferInterface> buffer(
+        video_frame.video_frame_buffer()->ToI420());
+    // Bake any rotation into the pixel data so painting is a straight blit.
+    if (video_frame.rotation() != webrtc::kVideoRotation_0) {
+      buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation());
+    }
+
+    SetSize(buffer->width(), buffer->height());
+
+    RTC_DCHECK(image_.get() != NULL);
+    // Destination stride is width * bytes-per-pixel.
+    libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(), buffer->DataU(),
+                       buffer->StrideU(), buffer->DataV(), buffer->StrideV(),
+                       image_.get(),
+                       bmi_.bmiHeader.biWidth * bmi_.bmiHeader.biBitCount / 8,
+                       buffer->width(), buffer->height());
+  }
+  // Outside the lock: schedule a repaint.
+  InvalidateRect(wnd_, NULL, TRUE);
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/main_wnd.h b/third_party/libwebrtc/examples/peerconnection/client/main_wnd.h
new file mode 100644
index 0000000000..898fea9d92
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/main_wnd.h
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
+#define EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
+
+#include <map>
+#include <memory>
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "api/video/video_frame.h"
+#include "examples/peerconnection/client/peer_connection_client.h"
+#include "media/base/media_channel.h"
+#include "media/base/video_common.h"
+#if defined(WEBRTC_WIN)
+#include "rtc_base/win32.h"
+#endif // WEBRTC_WIN
+
+// Observer interface through which the window reports user actions
+// (login, call, hang up, close) and delivers queued UI-thread callbacks.
+class MainWndCallback {
+ public:
+  virtual void StartLogin(const std::string& server, int port) = 0;
+  virtual void DisconnectFromServer() = 0;
+  virtual void ConnectToPeer(int peer_id) = 0;
+  virtual void DisconnectFromCurrentPeer() = 0;
+  // Invoked on the UI thread with a message previously passed to
+  // MainWindow::QueueUIThreadCallback().
+  virtual void UIThreadCallback(int msg_id, void* data) = 0;
+  virtual void Close() = 0;
+
+ protected:
+  virtual ~MainWndCallback() {}
+};
+
+// Pure virtual interface for the main window.
+class MainWindow {
+ public:
+  virtual ~MainWindow() {}
+
+  // Which of the three screens the window is currently showing.
+  enum UI {
+    CONNECT_TO_SERVER,
+    LIST_PEERS,
+    STREAMING,
+  };
+
+  // Registers the observer that receives UI events; see MainWndCallback.
+  virtual void RegisterObserver(MainWndCallback* callback) = 0;
+
+  virtual bool IsWindow() = 0;
+  virtual void MessageBox(const char* caption,
+                          const char* text,
+                          bool is_error) = 0;
+
+  virtual UI current_ui() = 0;
+
+  virtual void SwitchToConnectUI() = 0;
+  virtual void SwitchToPeerList(const Peers& peers) = 0;
+  virtual void SwitchToStreamingUI() = 0;
+
+  // Attach/detach renderers for the local and remote video tracks.
+  virtual void StartLocalRenderer(webrtc::VideoTrackInterface* local_video) = 0;
+  virtual void StopLocalRenderer() = 0;
+  virtual void StartRemoteRenderer(
+      webrtc::VideoTrackInterface* remote_video) = 0;
+  virtual void StopRemoteRenderer() = 0;
+
+  // Posts a callback to be delivered on the UI thread.
+  virtual void QueueUIThreadCallback(int msg_id, void* data) = 0;
+};
+
+#ifdef WIN32
+
+// Win32 implementation of MainWindow.
+class MainWnd : public MainWindow {
+ public:
+  static const wchar_t kClassName[];
+
+  enum WindowMessages {
+    // Thread message used by QueueUIThreadCallback()/PreTranslateMessage().
+    UI_THREAD_CALLBACK = WM_APP + 1,
+  };
+
+  MainWnd(const char* server, int port, bool auto_connect, bool auto_call);
+  ~MainWnd();
+
+  // Creates/destroys the top-level window and its children.
+  bool Create();
+  bool Destroy();
+  // Message-loop hook: handles Tab/Return/Esc and queued UI callbacks.
+  bool PreTranslateMessage(MSG* msg);
+
+  // MainWindow implementation.
+  virtual void RegisterObserver(MainWndCallback* callback);
+  virtual bool IsWindow();
+  virtual void SwitchToConnectUI();
+  virtual void SwitchToPeerList(const Peers& peers);
+  virtual void SwitchToStreamingUI();
+  virtual void MessageBox(const char* caption, const char* text, bool is_error);
+  virtual UI current_ui() { return ui_; }
+
+  virtual void StartLocalRenderer(webrtc::VideoTrackInterface* local_video);
+  virtual void StopLocalRenderer();
+  virtual void StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video);
+  virtual void StopRemoteRenderer();
+
+  virtual void QueueUIThreadCallback(int msg_id, void* data);
+
+  // Raw handle of the top-level window (NULL before Create()).
+  HWND handle() const { return wnd_; }
+
+  // Converts track frames to a 32-bit DIB that OnPaint() blits; image_ is
+  // guarded by buffer_lock_ (use AutoLock / Lock()/Unlock()).
+  class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+   public:
+    VideoRenderer(HWND wnd,
+                  int width,
+                  int height,
+                  webrtc::VideoTrackInterface* track_to_render);
+    virtual ~VideoRenderer();
+
+    void Lock() { ::EnterCriticalSection(&buffer_lock_); }
+
+    void Unlock() { ::LeaveCriticalSection(&buffer_lock_); }
+
+    // VideoSinkInterface implementation
+    void OnFrame(const webrtc::VideoFrame& frame) override;
+
+    const BITMAPINFO& bmi() const { return bmi_; }
+    // Latest converted frame, or null before the first frame. Only valid
+    // while holding the lock.
+    const uint8_t* image() const { return image_.get(); }
+
+   protected:
+    void SetSize(int width, int height);
+
+    // NOTE(review): these message ids appear unused in this class —
+    // candidates for removal.
+    enum {
+      SET_SIZE,
+      RENDER_FRAME,
+    };
+
+    HWND wnd_;
+    BITMAPINFO bmi_;
+    std::unique_ptr<uint8_t[]> image_;
+    CRITICAL_SECTION buffer_lock_;
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
+  };
+
+  // A little helper class to make sure we always to proper locking and
+  // unlocking when working with VideoRenderer buffers.
+  template <typename T>
+  class AutoLock {
+   public:
+    explicit AutoLock(T* obj) : obj_(obj) { obj_->Lock(); }
+    ~AutoLock() { obj_->Unlock(); }
+
+   protected:
+    T* obj_;
+  };
+
+ protected:
+  // Control ids passed to CreateWindowExW for the child controls.
+  enum ChildWindowID {
+    EDIT_ID = 1,
+    BUTTON_ID,
+    LABEL1_ID,
+    LABEL2_ID,
+    LISTBOX_ID,
+  };
+
+  void OnPaint();
+  void OnDestroyed();
+
+  void OnDefaultAction();
+
+  bool OnMessage(UINT msg, WPARAM wp, LPARAM lp, LRESULT* result);
+
+  static LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wp, LPARAM lp);
+  static bool RegisterWindowClass();
+
+  void CreateChildWindow(HWND* wnd,
+                         ChildWindowID id,
+                         const wchar_t* class_name,
+                         DWORD control_style,
+                         DWORD ex_style);
+  void CreateChildWindows();
+
+  void LayoutConnectUI(bool show);
+  void LayoutPeerListUI(bool show);
+
+  void HandleTabbing();
+
+ private:
+  std::unique_ptr<VideoRenderer> local_renderer_;
+  std::unique_ptr<VideoRenderer> remote_renderer_;
+  UI ui_;
+  HWND wnd_;
+  // Thread that created the window; target of QueueUIThreadCallback().
+  DWORD ui_thread_id_;
+  HWND edit1_;
+  HWND edit2_;
+  HWND label1_;
+  HWND label2_;
+  HWND button_;
+  HWND listbox_;
+  // Set by WndProc on WM_NCDESTROY; consumed when the outermost nested
+  // message unwinds.
+  bool destroyed_;
+  void* nested_msg_;
+  MainWndCallback* callback_;
+  static ATOM wnd_class_;
+  std::string server_;
+  std::string port_;
+  bool auto_connect_;
+  bool auto_call_;
+};
+#endif // WIN32
+
+#endif // EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.cc b/third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.cc
new file mode 100644
index 0000000000..48d5bb6545
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.cc
@@ -0,0 +1,493 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/client/peer_connection_client.h"
+
+#include "api/units/time_delta.h"
+#include "examples/peerconnection/client/defaults.h"
+#include "rtc_base/async_dns_resolver.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helpers.h"
+
+namespace {
+
+// This is our magical hangup signal.
+constexpr char kByeMessage[] = "BYE";
+// Delay between server connection retries, in milliseconds
+constexpr webrtc::TimeDelta kReconnectDelay = webrtc::TimeDelta::Seconds(2);
+
+rtc::Socket* CreateClientSocket(int family) {
+ rtc::Thread* thread = rtc::Thread::Current();
+ RTC_DCHECK(thread != NULL);
+ return thread->socketserver()->CreateSocket(family, SOCK_STREAM);
+}
+
+} // namespace
+
+PeerConnectionClient::PeerConnectionClient()
+ : callback_(NULL), resolver_(nullptr), state_(NOT_CONNECTED), my_id_(-1) {}
+
+PeerConnectionClient::~PeerConnectionClient() = default;
+
+void PeerConnectionClient::InitSocketSignals() {
+ RTC_DCHECK(control_socket_.get() != NULL);
+ RTC_DCHECK(hanging_get_.get() != NULL);
+ control_socket_->SignalCloseEvent.connect(this,
+ &PeerConnectionClient::OnClose);
+ hanging_get_->SignalCloseEvent.connect(this, &PeerConnectionClient::OnClose);
+ control_socket_->SignalConnectEvent.connect(this,
+ &PeerConnectionClient::OnConnect);
+ hanging_get_->SignalConnectEvent.connect(
+ this, &PeerConnectionClient::OnHangingGetConnect);
+ control_socket_->SignalReadEvent.connect(this, &PeerConnectionClient::OnRead);
+ hanging_get_->SignalReadEvent.connect(
+ this, &PeerConnectionClient::OnHangingGetRead);
+}
+
+int PeerConnectionClient::id() const {
+ return my_id_;
+}
+
+bool PeerConnectionClient::is_connected() const {
+ return my_id_ != -1;
+}
+
+const Peers& PeerConnectionClient::peers() const {
+ return peers_;
+}
+
+void PeerConnectionClient::RegisterObserver(
+ PeerConnectionClientObserver* callback) {
+ RTC_DCHECK(!callback_);
+ callback_ = callback;
+}
+
+void PeerConnectionClient::Connect(const std::string& server,
+ int port,
+ const std::string& client_name) {
+ RTC_DCHECK(!server.empty());
+ RTC_DCHECK(!client_name.empty());
+
+ if (state_ != NOT_CONNECTED) {
+ RTC_LOG(LS_WARNING)
+ << "The client must not be connected before you can call Connect()";
+ callback_->OnServerConnectionFailure();
+ return;
+ }
+
+ if (server.empty() || client_name.empty()) {
+ callback_->OnServerConnectionFailure();
+ return;
+ }
+
+ if (port <= 0)
+ port = kDefaultServerPort;
+
+ server_address_.SetIP(server);
+ server_address_.SetPort(port);
+ client_name_ = client_name;
+
+ if (server_address_.IsUnresolvedIP()) {
+ RTC_DCHECK_NE(state_, RESOLVING);
+ RTC_DCHECK(!resolver_);
+ state_ = RESOLVING;
+ resolver_ = std::make_unique<webrtc::AsyncDnsResolver>();
+ resolver_->Start(server_address_,
+ [this] { OnResolveResult(resolver_->result()); });
+ } else {
+ DoConnect();
+ }
+}
+
+void PeerConnectionClient::OnResolveResult(
+ const webrtc::AsyncDnsResolverResult& result) {
+ if (result.GetError() != 0) {
+ callback_->OnServerConnectionFailure();
+ resolver_.reset();
+ state_ = NOT_CONNECTED;
+ return;
+ }
+ if (!result.GetResolvedAddress(AF_INET, &server_address_)) {
+ callback_->OnServerConnectionFailure();
+ resolver_.reset();
+ state_ = NOT_CONNECTED;
+ return;
+ }
+ DoConnect();
+}
+
+void PeerConnectionClient::DoConnect() {
+ control_socket_.reset(CreateClientSocket(server_address_.ipaddr().family()));
+ hanging_get_.reset(CreateClientSocket(server_address_.ipaddr().family()));
+ InitSocketSignals();
+ char buffer[1024];
+ snprintf(buffer, sizeof(buffer), "GET /sign_in?%s HTTP/1.0\r\n\r\n",
+ client_name_.c_str());
+ onconnect_data_ = buffer;
+
+ bool ret = ConnectControlSocket();
+ if (ret)
+ state_ = SIGNING_IN;
+ if (!ret) {
+ callback_->OnServerConnectionFailure();
+ }
+}
+
+bool PeerConnectionClient::SendToPeer(int peer_id, const std::string& message) {
+ if (state_ != CONNECTED)
+ return false;
+
+ RTC_DCHECK(is_connected());
+ RTC_DCHECK(control_socket_->GetState() == rtc::Socket::CS_CLOSED);
+ if (!is_connected() || peer_id == -1)
+ return false;
+
+ char headers[1024];
+ snprintf(headers, sizeof(headers),
+ "POST /message?peer_id=%i&to=%i HTTP/1.0\r\n"
+ "Content-Length: %zu\r\n"
+ "Content-Type: text/plain\r\n"
+ "\r\n",
+ my_id_, peer_id, message.length());
+ onconnect_data_ = headers;
+ onconnect_data_ += message;
+ return ConnectControlSocket();
+}
+
+bool PeerConnectionClient::SendHangUp(int peer_id) {
+ return SendToPeer(peer_id, kByeMessage);
+}
+
+bool PeerConnectionClient::IsSendingMessage() {
+ return state_ == CONNECTED &&
+ control_socket_->GetState() != rtc::Socket::CS_CLOSED;
+}
+
+bool PeerConnectionClient::SignOut() {
+ if (state_ == NOT_CONNECTED || state_ == SIGNING_OUT)
+ return true;
+
+ if (hanging_get_->GetState() != rtc::Socket::CS_CLOSED)
+ hanging_get_->Close();
+
+ if (control_socket_->GetState() == rtc::Socket::CS_CLOSED) {
+ state_ = SIGNING_OUT;
+
+ if (my_id_ != -1) {
+ char buffer[1024];
+ snprintf(buffer, sizeof(buffer),
+ "GET /sign_out?peer_id=%i HTTP/1.0\r\n\r\n", my_id_);
+ onconnect_data_ = buffer;
+ return ConnectControlSocket();
+ } else {
+ // Can occur if the app is closed before we finish connecting.
+ return true;
+ }
+ } else {
+ state_ = SIGNING_OUT_WAITING;
+ }
+
+ return true;
+}
+
+void PeerConnectionClient::Close() {
+ control_socket_->Close();
+ hanging_get_->Close();
+ onconnect_data_.clear();
+ peers_.clear();
+ resolver_.reset();
+ my_id_ = -1;
+ state_ = NOT_CONNECTED;
+}
+
+bool PeerConnectionClient::ConnectControlSocket() {
+ RTC_DCHECK(control_socket_->GetState() == rtc::Socket::CS_CLOSED);
+ int err = control_socket_->Connect(server_address_);
+ if (err == SOCKET_ERROR) {
+ Close();
+ return false;
+ }
+ return true;
+}
+
+void PeerConnectionClient::OnConnect(rtc::Socket* socket) {
+ RTC_DCHECK(!onconnect_data_.empty());
+ size_t sent = socket->Send(onconnect_data_.c_str(), onconnect_data_.length());
+ RTC_DCHECK(sent == onconnect_data_.length());
+ onconnect_data_.clear();
+}
+
+void PeerConnectionClient::OnHangingGetConnect(rtc::Socket* socket) {
+ char buffer[1024];
+ snprintf(buffer, sizeof(buffer), "GET /wait?peer_id=%i HTTP/1.0\r\n\r\n",
+ my_id_);
+ int len = static_cast<int>(strlen(buffer));
+ int sent = socket->Send(buffer, len);
+ RTC_DCHECK(sent == len);
+}
+
+void PeerConnectionClient::OnMessageFromPeer(int peer_id,
+ const std::string& message) {
+ if (message.length() == (sizeof(kByeMessage) - 1) &&
+ message.compare(kByeMessage) == 0) {
+ callback_->OnPeerDisconnected(peer_id);
+ } else {
+ callback_->OnMessageFromPeer(peer_id, message);
+ }
+}
+
+bool PeerConnectionClient::GetHeaderValue(const std::string& data,
+ size_t eoh,
+ const char* header_pattern,
+ size_t* value) {
+ RTC_DCHECK(value != NULL);
+ size_t found = data.find(header_pattern);
+ if (found != std::string::npos && found < eoh) {
+ *value = atoi(&data[found + strlen(header_pattern)]);
+ return true;
+ }
+ return false;
+}
+
+bool PeerConnectionClient::GetHeaderValue(const std::string& data,
+ size_t eoh,
+ const char* header_pattern,
+ std::string* value) {
+ RTC_DCHECK(value != NULL);
+ size_t found = data.find(header_pattern);
+ if (found != std::string::npos && found < eoh) {
+ size_t begin = found + strlen(header_pattern);
+ size_t end = data.find("\r\n", begin);
+ if (end == std::string::npos)
+ end = eoh;
+ value->assign(data.substr(begin, end - begin));
+ return true;
+ }
+ return false;
+}
+
+bool PeerConnectionClient::ReadIntoBuffer(rtc::Socket* socket,
+ std::string* data,
+ size_t* content_length) {
+ char buffer[0xffff];
+ do {
+ int bytes = socket->Recv(buffer, sizeof(buffer), nullptr);
+ if (bytes <= 0)
+ break;
+ data->append(buffer, bytes);
+ } while (true);
+
+ bool ret = false;
+ size_t i = data->find("\r\n\r\n");
+ if (i != std::string::npos) {
+ RTC_LOG(LS_INFO) << "Headers received";
+ if (GetHeaderValue(*data, i, "\r\nContent-Length: ", content_length)) {
+ size_t total_response_size = (i + 4) + *content_length;
+ if (data->length() >= total_response_size) {
+ ret = true;
+ std::string should_close;
+ const char kConnection[] = "\r\nConnection: ";
+ if (GetHeaderValue(*data, i, kConnection, &should_close) &&
+ should_close.compare("close") == 0) {
+ socket->Close();
+ // Since we closed the socket, there was no notification delivered
+ // to us. Compensate by letting ourselves know.
+ OnClose(socket, 0);
+ }
+ } else {
+ // We haven't received everything. Just continue to accept data.
+ }
+ } else {
+ RTC_LOG(LS_ERROR) << "No content length field specified by the server.";
+ }
+ }
+ return ret;
+}
+
+void PeerConnectionClient::OnRead(rtc::Socket* socket) {
+ size_t content_length = 0;
+ if (ReadIntoBuffer(socket, &control_data_, &content_length)) {
+ size_t peer_id = 0, eoh = 0;
+ bool ok =
+ ParseServerResponse(control_data_, content_length, &peer_id, &eoh);
+ if (ok) {
+ if (my_id_ == -1) {
+ // First response. Let's store our server assigned ID.
+ RTC_DCHECK(state_ == SIGNING_IN);
+ my_id_ = static_cast<int>(peer_id);
+ RTC_DCHECK(my_id_ != -1);
+
+ // The body of the response will be a list of already connected peers.
+ if (content_length) {
+ size_t pos = eoh + 4;
+ while (pos < control_data_.size()) {
+ size_t eol = control_data_.find('\n', pos);
+ if (eol == std::string::npos)
+ break;
+ int id = 0;
+ std::string name;
+ bool connected;
+ if (ParseEntry(control_data_.substr(pos, eol - pos), &name, &id,
+ &connected) &&
+ id != my_id_) {
+ peers_[id] = name;
+ callback_->OnPeerConnected(id, name);
+ }
+ pos = eol + 1;
+ }
+ }
+ RTC_DCHECK(is_connected());
+ callback_->OnSignedIn();
+ } else if (state_ == SIGNING_OUT) {
+ Close();
+ callback_->OnDisconnected();
+ } else if (state_ == SIGNING_OUT_WAITING) {
+ SignOut();
+ }
+ }
+
+ control_data_.clear();
+
+ if (state_ == SIGNING_IN) {
+ RTC_DCHECK(hanging_get_->GetState() == rtc::Socket::CS_CLOSED);
+ state_ = CONNECTED;
+ hanging_get_->Connect(server_address_);
+ }
+ }
+}
+
+void PeerConnectionClient::OnHangingGetRead(rtc::Socket* socket) {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ size_t content_length = 0;
+ if (ReadIntoBuffer(socket, &notification_data_, &content_length)) {
+ size_t peer_id = 0, eoh = 0;
+ bool ok =
+ ParseServerResponse(notification_data_, content_length, &peer_id, &eoh);
+
+ if (ok) {
+ // Store the position where the body begins.
+ size_t pos = eoh + 4;
+
+ if (my_id_ == static_cast<int>(peer_id)) {
+ // A notification about a new member or a member that just
+ // disconnected.
+ int id = 0;
+ std::string name;
+ bool connected = false;
+ if (ParseEntry(notification_data_.substr(pos), &name, &id,
+ &connected)) {
+ if (connected) {
+ peers_[id] = name;
+ callback_->OnPeerConnected(id, name);
+ } else {
+ peers_.erase(id);
+ callback_->OnPeerDisconnected(id);
+ }
+ }
+ } else {
+ OnMessageFromPeer(static_cast<int>(peer_id),
+ notification_data_.substr(pos));
+ }
+ }
+
+ notification_data_.clear();
+ }
+
+ if (hanging_get_->GetState() == rtc::Socket::CS_CLOSED &&
+ state_ == CONNECTED) {
+ hanging_get_->Connect(server_address_);
+ }
+}
+
+bool PeerConnectionClient::ParseEntry(const std::string& entry,
+ std::string* name,
+ int* id,
+ bool* connected) {
+ RTC_DCHECK(name != NULL);
+ RTC_DCHECK(id != NULL);
+ RTC_DCHECK(connected != NULL);
+ RTC_DCHECK(!entry.empty());
+
+ *connected = false;
+ size_t separator = entry.find(',');
+ if (separator != std::string::npos) {
+ *id = atoi(&entry[separator + 1]);
+ name->assign(entry.substr(0, separator));
+ separator = entry.find(',', separator + 1);
+ if (separator != std::string::npos) {
+ *connected = atoi(&entry[separator + 1]) ? true : false;
+ }
+ }
+ return !name->empty();
+}
+
+int PeerConnectionClient::GetResponseStatus(const std::string& response) {
+ int status = -1;
+ size_t pos = response.find(' ');
+ if (pos != std::string::npos)
+ status = atoi(&response[pos + 1]);
+ return status;
+}
+
+bool PeerConnectionClient::ParseServerResponse(const std::string& response,
+ size_t content_length,
+ size_t* peer_id,
+ size_t* eoh) {
+ int status = GetResponseStatus(response.c_str());
+ if (status != 200) {
+ RTC_LOG(LS_ERROR) << "Received error from server";
+ Close();
+ callback_->OnDisconnected();
+ return false;
+ }
+
+ *eoh = response.find("\r\n\r\n");
+ RTC_DCHECK(*eoh != std::string::npos);
+ if (*eoh == std::string::npos)
+ return false;
+
+ *peer_id = -1;
+
+ // See comment in peer_channel.cc for why we use the Pragma header.
+ GetHeaderValue(response, *eoh, "\r\nPragma: ", peer_id);
+
+ return true;
+}
+
+void PeerConnectionClient::OnClose(rtc::Socket* socket, int err) {
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+
+ socket->Close();
+
+#ifdef WIN32
+ if (err != WSAECONNREFUSED) {
+#else
+ if (err != ECONNREFUSED) {
+#endif
+ if (socket == hanging_get_.get()) {
+ if (state_ == CONNECTED) {
+ hanging_get_->Close();
+ hanging_get_->Connect(server_address_);
+ }
+ } else {
+ callback_->OnMessageSent(err);
+ }
+ } else {
+ if (socket == control_socket_.get()) {
+ RTC_LOG(LS_WARNING) << "Connection refused; retrying in 2 seconds";
+ rtc::Thread::Current()->PostDelayedTask(
+ SafeTask(safety_.flag(), [this] { DoConnect(); }), kReconnectDelay);
+ } else {
+ Close();
+ callback_->OnDisconnected();
+ }
+ }
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.h b/third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.h
new file mode 100644
index 0000000000..d56752a7fa
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/client/peer_connection_client.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
+#define EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
+
+#include <map>
+#include <memory>
+#include <string>
+
+#include "api/async_dns_resolver.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "rtc_base/net_helpers.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+
+typedef std::map<int, std::string> Peers;
+
+struct PeerConnectionClientObserver {
+ virtual void OnSignedIn() = 0; // Called when we're logged on.
+ virtual void OnDisconnected() = 0;
+ virtual void OnPeerConnected(int id, const std::string& name) = 0;
+ virtual void OnPeerDisconnected(int peer_id) = 0;
+ virtual void OnMessageFromPeer(int peer_id, const std::string& message) = 0;
+ virtual void OnMessageSent(int err) = 0;
+ virtual void OnServerConnectionFailure() = 0;
+
+ protected:
+ virtual ~PeerConnectionClientObserver() {}
+};
+
+class PeerConnectionClient : public sigslot::has_slots<> {
+ public:
+ enum State {
+ NOT_CONNECTED,
+ RESOLVING,
+ SIGNING_IN,
+ CONNECTED,
+ SIGNING_OUT_WAITING,
+ SIGNING_OUT,
+ };
+
+ PeerConnectionClient();
+ ~PeerConnectionClient();
+
+ int id() const;
+ bool is_connected() const;
+ const Peers& peers() const;
+
+ void RegisterObserver(PeerConnectionClientObserver* callback);
+
+ void Connect(const std::string& server,
+ int port,
+ const std::string& client_name);
+
+ bool SendToPeer(int peer_id, const std::string& message);
+ bool SendHangUp(int peer_id);
+ bool IsSendingMessage();
+
+ bool SignOut();
+
+ protected:
+ void DoConnect();
+ void Close();
+ void InitSocketSignals();
+ bool ConnectControlSocket();
+ void OnConnect(rtc::Socket* socket);
+ void OnHangingGetConnect(rtc::Socket* socket);
+ void OnMessageFromPeer(int peer_id, const std::string& message);
+
+ // Quick and dirty support for parsing HTTP header values.
+ bool GetHeaderValue(const std::string& data,
+ size_t eoh,
+ const char* header_pattern,
+ size_t* value);
+
+ bool GetHeaderValue(const std::string& data,
+ size_t eoh,
+ const char* header_pattern,
+ std::string* value);
+
+ // Returns true if the whole response has been read.
+ bool ReadIntoBuffer(rtc::Socket* socket,
+ std::string* data,
+ size_t* content_length);
+
+ void OnRead(rtc::Socket* socket);
+
+ void OnHangingGetRead(rtc::Socket* socket);
+
+ // Parses a single line entry in the form "<name>,<id>,<connected>"
+ bool ParseEntry(const std::string& entry,
+ std::string* name,
+ int* id,
+ bool* connected);
+
+ int GetResponseStatus(const std::string& response);
+
+ bool ParseServerResponse(const std::string& response,
+ size_t content_length,
+ size_t* peer_id,
+ size_t* eoh);
+
+ void OnClose(rtc::Socket* socket, int err);
+
+ void OnResolveResult(const webrtc::AsyncDnsResolverResult& result);
+
+ PeerConnectionClientObserver* callback_;
+ rtc::SocketAddress server_address_;
+ std::unique_ptr<webrtc::AsyncDnsResolverInterface> resolver_;
+ std::unique_ptr<rtc::Socket> control_socket_;
+ std::unique_ptr<rtc::Socket> hanging_get_;
+ std::string onconnect_data_;
+ std::string control_data_;
+ std::string notification_data_;
+ std::string client_name_;
+ Peers peers_;
+ State state_;
+ int my_id_;
+ webrtc::ScopedTaskSafety safety_;
+};
+
+#endif // EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/server/data_socket.cc b/third_party/libwebrtc/examples/peerconnection/server/data_socket.cc
new file mode 100644
index 0000000000..855ebd8c0c
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/data_socket.cc
@@ -0,0 +1,299 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/server/data_socket.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#if defined(WEBRTC_POSIX)
+#include <unistd.h>
+#endif
+
+#include "examples/peerconnection/server/utils.h"
+#include "rtc_base/checks.h"
+
+static const char kHeaderTerminator[] = "\r\n\r\n";
+static const int kHeaderTerminatorLength = sizeof(kHeaderTerminator) - 1;
+
+// static
+const char DataSocket::kCrossOriginAllowHeaders[] =
+ "Access-Control-Allow-Origin: *\r\n"
+ "Access-Control-Allow-Credentials: true\r\n"
+ "Access-Control-Allow-Methods: POST, GET, OPTIONS\r\n"
+ "Access-Control-Allow-Headers: Content-Type, "
+ "Content-Length, Connection, Cache-Control\r\n"
+ "Access-Control-Expose-Headers: Content-Length\r\n";
+
+#if defined(WIN32)
+class WinsockInitializer {
+ static WinsockInitializer singleton;
+
+ WinsockInitializer() {
+ WSADATA data;
+ WSAStartup(MAKEWORD(1, 0), &data);
+ }
+
+ public:
+ ~WinsockInitializer() { WSACleanup(); }
+};
+WinsockInitializer WinsockInitializer::singleton;
+#endif
+
+//
+// SocketBase
+//
+
+bool SocketBase::Create() {
+ RTC_DCHECK(!valid());
+ socket_ = ::socket(AF_INET, SOCK_STREAM, 0);
+ return valid();
+}
+
+void SocketBase::Close() {
+ if (socket_ != INVALID_SOCKET) {
+ closesocket(socket_);
+ socket_ = INVALID_SOCKET;
+ }
+}
+
+//
+// DataSocket
+//
+
+std::string DataSocket::request_arguments() const {
+ size_t args = request_path_.find('?');
+ if (args != std::string::npos)
+ return request_path_.substr(args + 1);
+ return "";
+}
+
+bool DataSocket::PathEquals(const char* path) const {
+ RTC_DCHECK(path);
+ size_t args = request_path_.find('?');
+ if (args != std::string::npos)
+ return request_path_.substr(0, args).compare(path) == 0;
+ return request_path_.compare(path) == 0;
+}
+
+bool DataSocket::OnDataAvailable(bool* close_socket) {
+ RTC_DCHECK(valid());
+ char buffer[0xfff] = {0};
+ int bytes = recv(socket_, buffer, sizeof(buffer), 0);
+ if (bytes == SOCKET_ERROR || bytes == 0) {
+ *close_socket = true;
+ return false;
+ }
+
+ *close_socket = false;
+
+ bool ret = true;
+ if (headers_received()) {
+ if (method_ != POST) {
+ // unexpectedly received data.
+ ret = false;
+ } else {
+ data_.append(buffer, bytes);
+ }
+ } else {
+ request_headers_.append(buffer, bytes);
+ size_t found = request_headers_.find(kHeaderTerminator);
+ if (found != std::string::npos) {
+ data_ = request_headers_.substr(found + kHeaderTerminatorLength);
+ request_headers_.resize(found + kHeaderTerminatorLength);
+ ret = ParseHeaders();
+ }
+ }
+ return ret;
+}
+
+bool DataSocket::Send(const std::string& data) const {
+ return send(socket_, data.data(), static_cast<int>(data.length()), 0) !=
+ SOCKET_ERROR;
+}
+
+bool DataSocket::Send(const std::string& status,
+ bool connection_close,
+ const std::string& content_type,
+ const std::string& extra_headers,
+ const std::string& data) const {
+ RTC_DCHECK(valid());
+ RTC_DCHECK(!status.empty());
+ std::string buffer("HTTP/1.1 " + status + "\r\n");
+
+ buffer +=
+ "Server: PeerConnectionTestServer/0.1\r\n"
+ "Cache-Control: no-cache\r\n";
+
+ if (connection_close)
+ buffer += "Connection: close\r\n";
+
+ if (!content_type.empty())
+ buffer += "Content-Type: " + content_type + "\r\n";
+
+ buffer +=
+ "Content-Length: " + int2str(static_cast<int>(data.size())) + "\r\n";
+
+ if (!extra_headers.empty()) {
+ buffer += extra_headers;
+ // Extra headers are assumed to have a separator per header.
+ }
+
+ buffer += kCrossOriginAllowHeaders;
+
+ buffer += "\r\n";
+ buffer += data;
+
+ return Send(buffer);
+}
+
+void DataSocket::Clear() {
+ method_ = INVALID;
+ content_length_ = 0;
+ content_type_.clear();
+ request_path_.clear();
+ request_headers_.clear();
+ data_.clear();
+}
+
+bool DataSocket::ParseHeaders() {
+ RTC_DCHECK(!request_headers_.empty());
+ RTC_DCHECK_EQ(method_, INVALID);
+ size_t i = request_headers_.find("\r\n");
+ if (i == std::string::npos)
+ return false;
+
+ if (!ParseMethodAndPath(request_headers_.data(), i))
+ return false;
+
+ RTC_DCHECK_NE(method_, INVALID);
+ RTC_DCHECK(!request_path_.empty());
+
+ if (method_ == POST) {
+ const char* headers = request_headers_.data() + i + 2;
+ size_t len = request_headers_.length() - i - 2;
+ if (!ParseContentLengthAndType(headers, len))
+ return false;
+ }
+
+ return true;
+}
+
+bool DataSocket::ParseMethodAndPath(const char* begin, size_t len) {
+ struct {
+ const char* method_name;
+ size_t method_name_len;
+ RequestMethod id;
+ } supported_methods[] = {
+ {"GET", 3, GET},
+ {"POST", 4, POST},
+ {"OPTIONS", 7, OPTIONS},
+ };
+
+ const char* path = NULL;
+ for (size_t i = 0; i < ARRAYSIZE(supported_methods); ++i) {
+ if (len > supported_methods[i].method_name_len &&
+ isspace(begin[supported_methods[i].method_name_len]) &&
+ strncmp(begin, supported_methods[i].method_name,
+ supported_methods[i].method_name_len) == 0) {
+ method_ = supported_methods[i].id;
+ path = begin + supported_methods[i].method_name_len;
+ break;
+ }
+ }
+
+ const char* end = begin + len;
+ if (!path || path >= end)
+ return false;
+
+ ++path;
+ begin = path;
+ while (!isspace(*path) && path < end)
+ ++path;
+
+ request_path_.assign(begin, path - begin);
+
+ return true;
+}
+
+bool DataSocket::ParseContentLengthAndType(const char* headers, size_t length) {
+ RTC_DCHECK_EQ(content_length_, 0);
+ RTC_DCHECK(content_type_.empty());
+
+ const char* end = headers + length;
+ while (headers && headers < end) {
+ if (!isspace(headers[0])) {
+ static const char kContentLength[] = "Content-Length:";
+ static const char kContentType[] = "Content-Type:";
+ if ((headers + ARRAYSIZE(kContentLength)) < end &&
+ strncmp(headers, kContentLength, ARRAYSIZE(kContentLength) - 1) ==
+ 0) {
+ headers += ARRAYSIZE(kContentLength) - 1;
+ while (headers[0] == ' ')
+ ++headers;
+ content_length_ = atoi(headers);
+ } else if ((headers + ARRAYSIZE(kContentType)) < end &&
+ strncmp(headers, kContentType, ARRAYSIZE(kContentType) - 1) ==
+ 0) {
+ headers += ARRAYSIZE(kContentType) - 1;
+ while (headers[0] == ' ')
+ ++headers;
+ const char* type_end = strstr(headers, "\r\n");
+ if (type_end == NULL)
+ type_end = end;
+ content_type_.assign(headers, type_end);
+ }
+ } else {
+ ++headers;
+ }
+ headers = strstr(headers, "\r\n");
+ if (headers)
+ headers += 2;
+ }
+
+ return !content_type_.empty() && content_length_ != 0;
+}
+
+//
+// ListeningSocket
+//
+
+bool ListeningSocket::Listen(unsigned short port) {
+ RTC_DCHECK(valid());
+ int enabled = 1;
+ if (setsockopt(socket_, SOL_SOCKET, SO_REUSEADDR,
+ reinterpret_cast<const char*>(&enabled),
+ sizeof(enabled)) != 0) {
+ printf("setsockopt failed\n");
+ return false;
+ }
+ struct sockaddr_in addr = {0};
+ addr.sin_family = AF_INET;
+ addr.sin_addr.s_addr = htonl(INADDR_ANY);
+ addr.sin_port = htons(port);
+ if (bind(socket_, reinterpret_cast<const sockaddr*>(&addr), sizeof(addr)) ==
+ SOCKET_ERROR) {
+ printf("bind failed\n");
+ return false;
+ }
+ return listen(socket_, 5) != SOCKET_ERROR;
+}
+
+DataSocket* ListeningSocket::Accept() const {
+ RTC_DCHECK(valid());
+ struct sockaddr_in addr = {0};
+ socklen_t size = sizeof(addr);
+ NativeSocket client =
+ accept(socket_, reinterpret_cast<sockaddr*>(&addr), &size);
+ if (client == INVALID_SOCKET)
+ return NULL;
+
+ return new DataSocket(client);
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/server/data_socket.h b/third_party/libwebrtc/examples/peerconnection/server/data_socket.h
new file mode 100644
index 0000000000..57ad5b9aee
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/data_socket.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#define EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+
+#ifdef WIN32
+#include <winsock2.h>
+typedef int socklen_t;
+typedef SOCKET NativeSocket;
+#else
+#include <netinet/in.h>
+#include <sys/select.h>
+#include <sys/socket.h>
+#define closesocket close
+typedef int NativeSocket;
+
+#ifndef SOCKET_ERROR
+#define SOCKET_ERROR (-1)
+#endif
+
+#ifndef INVALID_SOCKET
+#define INVALID_SOCKET static_cast<NativeSocket>(-1)
+#endif
+#endif
+
+#include <string>
+
+class SocketBase {
+ public:
+ SocketBase() : socket_(INVALID_SOCKET) {}
+ explicit SocketBase(NativeSocket socket) : socket_(socket) {}
+ SocketBase(SocketBase& other) = delete;
+ SocketBase& operator=(const SocketBase& other) = delete;
+ ~SocketBase() { Close(); }
+
+ NativeSocket socket() const { return socket_; }
+ bool valid() const { return socket_ != INVALID_SOCKET; }
+
+ bool Create();
+ void Close();
+
+ protected:
+ NativeSocket socket_;
+};
+
+// Represents an HTTP server socket.
+class DataSocket : public SocketBase {
+ public:
+ enum RequestMethod {
+ INVALID,
+ GET,
+ POST,
+ OPTIONS,
+ };
+
+ explicit DataSocket(NativeSocket socket)
+ : SocketBase(socket), method_(INVALID), content_length_(0) {}
+
+ ~DataSocket() {}
+
+ static const char kCrossOriginAllowHeaders[];
+
+ bool headers_received() const { return method_ != INVALID; }
+
+ RequestMethod method() const { return method_; }
+
+ const std::string& request_path() const { return request_path_; }
+ std::string request_arguments() const;
+
+ const std::string& data() const { return data_; }
+
+ const std::string& content_type() const { return content_type_; }
+
+ size_t content_length() const { return content_length_; }
+
+ bool request_received() const {
+ return headers_received() && (method_ != POST || data_received());
+ }
+
+ bool data_received() const {
+ return method_ != POST || data_.length() >= content_length_;
+ }
+
+ // Checks if the request path (minus arguments) matches a given path.
+ bool PathEquals(const char* path) const;
+
+ // Called when we have received some data from clients.
+ // Returns false if an error occurred.
+ bool OnDataAvailable(bool* close_socket);
+
+ // Send a raw buffer of bytes.
+ bool Send(const std::string& data) const;
+
+ // Send an HTTP response. The `status` should start with a valid HTTP
+ // response code, followed by a string. E.g. "200 OK".
+ // If `connection_close` is set to true, an extra "Connection: close" HTTP
+ // header will be included. `content_type` is the mime content type, not
+ // including the "Content-Type: " string.
+ // `extra_headers` should be either empty or a list of headers where each
+ // header terminates with "\r\n".
+ // `data` is the body of the message. It's length will be specified via
+ // a "Content-Length" header.
+ bool Send(const std::string& status,
+ bool connection_close,
+ const std::string& content_type,
+ const std::string& extra_headers,
+ const std::string& data) const;
+
+ // Clears all held state and prepares the socket for receiving a new request.
+ void Clear();
+
+ protected:
+ // A fairly relaxed HTTP header parser. Parses the method, path and
+ // content length (POST only) of a request.
+ // Returns true if a valid request was received and no errors occurred.
+ bool ParseHeaders();
+
+ // Figures out whether the request is a GET or POST and what path is
+ // being requested.
+ bool ParseMethodAndPath(const char* begin, size_t len);
+
+ // Determines the length of the body and it's mime type.
+ bool ParseContentLengthAndType(const char* headers, size_t length);
+
+ protected:
+ RequestMethod method_;
+ size_t content_length_;
+ std::string content_type_;
+ std::string request_path_;
+ std::string request_headers_;
+ std::string data_;
+};
+
+// The server socket. Accepts connections and generates DataSocket instances
+// for each new connection.
+class ListeningSocket : public SocketBase {
+ public:
+ ListeningSocket() {}
+
+ bool Listen(unsigned short port);
+ DataSocket* Accept() const;
+};
+
+#endif // EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/server/main.cc b/third_party/libwebrtc/examples/peerconnection/server/main.cc
new file mode 100644
index 0000000000..50b8c23401
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/main.cc
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#if defined(WEBRTC_POSIX)
+#include <sys/select.h>
+#endif
+#include <time.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "absl/flags/usage.h"
+#include "examples/peerconnection/server/data_socket.h"
+#include "examples/peerconnection/server/peer_channel.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+
+// Command-line flags for the signaling server.
+ABSL_FLAG(
+    std::string,
+    force_fieldtrials,
+    "",
+    "Field trials control experimental features. This flag specifies the field "
+    "trials in effect. E.g. running with "
+    "--force_fieldtrials=WebRTC-FooFeature/Enabled/ "
+    "will assign the group Enabled to field trial WebRTC-FooFeature. Multiple "
+    "trials are separated by \"/\"");
+ABSL_FLAG(int, port, 8888, "default: 8888");
+
+// Maximum number of simultaneously connected clients. Two fd_set slots are
+// held back from FD_SETSIZE — presumably for the listening socket plus one
+// spare — TODO confirm the intent of the -2.
+static const size_t kMaxConnections = (FD_SETSIZE - 2);
+
+// Serves requests that did not come from a peerconnection client (i.e. plain
+// browser traffic). Handles "/quit" (sets `*quit` so the main loop exits) and
+// OPTIONS preflight requests; anything else gets a 500 placeholder reply.
+void HandleBrowserRequest(DataSocket* ds, bool* quit) {
+  RTC_DCHECK(ds && ds->valid());
+  RTC_DCHECK(quit);
+
+  const std::string& path = ds->request_path();
+
+  *quit = (path.compare("/quit") == 0);
+
+  if (*quit) {
+    ds->Send("200 OK", true, "text/html", "",
+             "<html><body>Quitting...</body></html>");
+  } else if (ds->method() == DataSocket::OPTIONS) {
+    // We'll get this when browsers do cross-resource-sharing requests.
+    // The headers to allow cross-origin script support will be set inside
+    // Send.
+    ds->Send("200 OK", true, "", "", "");
+  } else {
+    // Here we could write some useful output back to the browser depending on
+    // the path.
+    printf("Received an invalid request: %s\n", ds->request_path().c_str());
+    ds->Send("500 Sorry", true, "text/html", "",
+             "<html><body>Sorry, not yet implemented</body></html>");
+  }
+}
+
+// Entry point of the signaling server: parses flags, opens the listening
+// socket and runs a single-threaded select() loop that multiplexes the
+// listener and every connected DataSocket until "/quit" is requested.
+int main(int argc, char* argv[]) {
+  absl::SetProgramUsageMessage(
+      "Example usage: ./peerconnection_server --port=8888\n");
+  absl::ParseCommandLine(argc, argv);
+
+  // InitFieldTrialsFromString stores the char*, so the char array must outlive
+  // the application.
+  const std::string force_field_trials = absl::GetFlag(FLAGS_force_fieldtrials);
+  webrtc::field_trial::InitFieldTrialsFromString(force_field_trials.c_str());
+
+  int port = absl::GetFlag(FLAGS_port);
+
+  // Abort if the user specifies a port that is outside the allowed
+  // range [1, 65535].
+  if ((port < 1) || (port > 65535)) {
+    printf("Error: %i is not a valid port.\n", port);
+    return -1;
+  }
+
+  ListeningSocket listener;
+  if (!listener.Create()) {
+    printf("Failed to create server socket\n");
+    return -1;
+  } else if (!listener.Listen(port)) {
+    printf("Failed to listen on server socket\n");
+    return -1;
+  }
+
+  printf("Server listening on port %i\n", port);
+
+  PeerChannel clients;
+  typedef std::vector<DataSocket*> SocketArray;
+  SocketArray sockets;
+  bool quit = false;
+  // Main loop: rebuild the fd_set each iteration and wait up to 10 seconds
+  // for activity on the listener or any client socket.
+  while (!quit) {
+    fd_set socket_set;
+    FD_ZERO(&socket_set);
+    if (listener.valid())
+      FD_SET(listener.socket(), &socket_set);
+
+    for (SocketArray::iterator i = sockets.begin(); i != sockets.end(); ++i)
+      FD_SET((*i)->socket(), &socket_set);
+
+    struct timeval timeout = {10, 0};
+    // NOTE(review): SOCKET_ERROR is presumably defined (-1 on POSIX) by
+    // data_socket.h — confirm for non-Windows builds.
+    if (select(FD_SETSIZE, &socket_set, NULL, NULL, &timeout) == SOCKET_ERROR) {
+      printf("select failed\n");
+      break;
+    }
+
+    for (SocketArray::iterator i = sockets.begin(); i != sockets.end(); ++i) {
+      DataSocket* s = *i;
+      bool socket_done = true;
+      if (FD_ISSET(s->socket(), &socket_set)) {
+        if (s->OnDataAvailable(&socket_done) && s->request_received()) {
+          ChannelMember* member = clients.Lookup(s);
+          if (member || PeerChannel::IsPeerConnection(s)) {
+            if (!member) {
+              if (s->PathEquals("/sign_in")) {
+                clients.AddMember(s);
+              } else {
+                printf("No member found for: %s\n", s->request_path().c_str());
+                s->Send("500 Error", true, "text/plain", "",
+                        "Peer most likely gone.");
+              }
+            } else if (member->is_wait_request(s)) {
+              // no need to do anything.
+              socket_done = false;
+            } else {
+              ChannelMember* target = clients.IsTargetedRequest(s);
+              if (target) {
+                member->ForwardRequestToPeer(s, target);
+              } else if (s->PathEquals("/sign_out")) {
+                s->Send("200 OK", true, "text/plain", "", "");
+              } else {
+                printf("Couldn't find target for request: %s\n",
+                       s->request_path().c_str());
+                s->Send("500 Error", true, "text/plain", "",
+                        "Peer most likely gone.");
+              }
+            }
+          } else {
+            HandleBrowserRequest(s, &quit);
+            if (quit) {
+              printf("Quitting...\n");
+              FD_CLR(listener.socket(), &socket_set);
+              listener.Close();
+              clients.CloseAll();
+            }
+          }
+        }
+      } else {
+        socket_done = false;
+      }
+
+      if (socket_done) {
+        printf("Disconnecting socket\n");
+        clients.OnClosing(s);
+        RTC_DCHECK(s->valid());  // Close must not have been called yet.
+        FD_CLR(s->socket(), &socket_set);
+        delete (*i);
+        // NOTE(review): erase() already yields the next element, and the loop
+        // then advances `i` again, so the socket following the erased one is
+        // not examined this pass — it will be picked up next iteration.
+        // Pre-existing behavior; confirm it is intentional.
+        i = sockets.erase(i);
+        if (i == sockets.end())
+          break;
+      }
+    }
+
+    clients.CheckForTimeout();
+
+    if (FD_ISSET(listener.socket(), &socket_set)) {
+      // NOTE(review): Accept() could conceivably return NULL on failure, in
+      // which case a null pointer would be stored in `sockets` — confirm
+      // Accept's contract.
+      DataSocket* s = listener.Accept();
+      if (sockets.size() >= kMaxConnections) {
+        delete s;  // sorry, that's all we can take.
+        printf("Connection limit reached\n");
+      } else {
+        sockets.push_back(s);
+        printf("New connection...\n");
+      }
+    }
+  }
+
+  for (SocketArray::iterator i = sockets.begin(); i != sockets.end(); ++i)
+    delete (*i);
+  sockets.clear();
+
+  return 0;
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/server/peer_channel.cc b/third_party/libwebrtc/examples/peerconnection/server/peer_channel.cc
new file mode 100644
index 0000000000..f53820cc60
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/peer_channel.cc
@@ -0,0 +1,360 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/server/peer_channel.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <algorithm>
+
+#include "examples/peerconnection/server/data_socket.h"
+#include "examples/peerconnection/server/utils.h"
+#include "rtc_base/checks.h"
+
+// Set to the peer id of the originator when messages are being
+// exchanged between peers, but set to the id of the receiving peer
+// itself when notifications are sent from the server about the state
+// of other peers.
+//
+// WORKAROUND: Since support for CORS varies greatly from one browser to the
+// next, we don't use a custom name for our peer-id header (originally it was
+// "X-Peer-Id: "). Instead, we use a "simple header", "Pragma" which should
+// always be exposed to CORS requests. There is a special CORS header devoted
+// to exposing proprietary headers (Access-Control-Expose-Headers), however
+// at this point it is not working correctly in some popular browsers.
+static const char kPeerIdHeader[] = "Pragma: ";
+
+static const char* kRequestPaths[] = {
+    "/wait",
+    "/sign_out",
+    "/message",
+};
+
+// Indices into kRequestPaths; keep the two arrays in sync.
+enum RequestPathIndex {
+  kWait,
+  kSignOut,
+  kMessage,
+};
+
+// Upper bound on a member name; longer names are truncated in the ctor.
+const size_t kMaxNameLength = 512;
+
+//
+// ChannelMember
+//
+
+// Monotonically increasing id source; the first member gets id 1.
+int ChannelMember::s_member_id_ = 0;
+
+// Creates a member from a "/sign_in" GET request. The name is taken from the
+// query string (or synthesized as "peer_<id>") and truncated to
+// kMaxNameLength. Commas are replaced because ',' is the field separator in
+// GetEntry().
+ChannelMember::ChannelMember(DataSocket* socket)
+    : waiting_socket_(NULL),
+      id_(++s_member_id_),
+      connected_(true),
+      timestamp_(time(NULL)) {
+  RTC_DCHECK(socket);
+  RTC_DCHECK_EQ(socket->method(), DataSocket::GET);
+  RTC_DCHECK(socket->PathEquals("/sign_in"));
+  name_ = socket->request_arguments();
+  if (name_.empty())
+    name_ = "peer_" + int2str(id_);
+  else if (name_.length() > kMaxNameLength)
+    name_.resize(kMaxNameLength);
+
+  std::replace(name_.begin(), name_.end(), ',', '_');
+}
+
+ChannelMember::~ChannelMember() {}
+
+// True if `ds` is a request for the hanging-GET "/wait" path.
+bool ChannelMember::is_wait_request(DataSocket* ds) const {
+  return ds && ds->PathEquals(kRequestPaths[kWait]);
+}
+
+// A member times out after more than 30 seconds without a parked /wait
+// socket. `timestamp_` is refreshed whenever the waiting socket goes away.
+bool ChannelMember::TimedOut() {
+  return waiting_socket_ == NULL && (time(NULL) - timestamp_) > 30;
+}
+
+// Builds the "Pragma: <id>\r\n" header used to tag responses with this
+// member's id (see kPeerIdHeader for why Pragma is used instead of a
+// custom header).
+std::string ChannelMember::GetPeerIdHeader() const {
+  std::string ret(kPeerIdHeader + int2str(id_) + "\r\n");
+  return ret;
+}
+
+// Queues a presence notification about `other` for delivery to this member.
+bool ChannelMember::NotifyOfOtherMember(const ChannelMember& other) {
+  RTC_DCHECK_NE(&other, this);
+  QueueResponse("200 OK", "text/plain", GetPeerIdHeader(), other.GetEntry());
+  return true;
+}
+
+// Returns a string in the form "name,id,connected\n".
+std::string ChannelMember::GetEntry() const {
+  RTC_DCHECK(name_.length() <= kMaxNameLength);
+
+  // name, 11-digit int, 1-digit bool, newline, null
+  char entry[kMaxNameLength + 15];
+  snprintf(entry, sizeof(entry), "%s,%d,%d\n",
+           name_.substr(0, kMaxNameLength).c_str(), id_, connected_);
+  return entry;
+}
+
+// Relays the body of `ds` to `peer` (queued for its hanging GET) and
+// acknowledges `ds`. Sending to oneself just echoes the payload back.
+void ChannelMember::ForwardRequestToPeer(DataSocket* ds, ChannelMember* peer) {
+  RTC_DCHECK(peer);
+  RTC_DCHECK(ds);
+
+  std::string extra_headers(GetPeerIdHeader());
+
+  if (peer == this) {
+    ds->Send("200 OK", true, ds->content_type(), extra_headers, ds->data());
+  } else {
+    printf("Client %s sending to %s\n", name_.c_str(), peer->name().c_str());
+    peer->QueueResponse("200 OK", ds->content_type(), extra_headers,
+                        ds->data());
+    ds->Send("200 OK", true, "text/plain", "", "");
+  }
+}
+
+// Drops the parked /wait socket if it is the one being closed, and restarts
+// the timeout clock.
+void ChannelMember::OnClosing(DataSocket* ds) {
+  if (ds == waiting_socket_) {
+    waiting_socket_ = NULL;
+    timestamp_ = time(NULL);
+  }
+}
+
+// Delivers a response immediately on the parked /wait socket if one exists;
+// otherwise stores it until the next /wait request arrives.
+void ChannelMember::QueueResponse(const std::string& status,
+                                  const std::string& content_type,
+                                  const std::string& extra_headers,
+                                  const std::string& data) {
+  if (waiting_socket_) {
+    RTC_DCHECK(queue_.empty());
+    RTC_DCHECK_EQ(waiting_socket_->method(), DataSocket::GET);
+    bool ok =
+        waiting_socket_->Send(status, true, content_type, extra_headers, data);
+    if (!ok) {
+      printf("Failed to deliver data to waiting socket\n");
+    }
+    waiting_socket_ = NULL;
+    timestamp_ = time(NULL);
+  } else {
+    QueuedResponse qr;
+    qr.status = status;
+    qr.content_type = content_type;
+    qr.extra_headers = extra_headers;
+    qr.data = data;
+    queue_.push(qr);
+  }
+}
+
+// Parks `ds` as the member's hanging GET, or — if responses are already
+// queued — answers it immediately with the oldest one instead.
+// NOTE(review): `ds` is dereferenced by the DCHECK before the null test in
+// the `if`; callers (PeerChannel::Lookup) currently always pass a non-null
+// socket — confirm no other caller can pass NULL.
+void ChannelMember::SetWaitingSocket(DataSocket* ds) {
+  RTC_DCHECK_EQ(ds->method(), DataSocket::GET);
+  if (ds && !queue_.empty()) {
+    RTC_DCHECK(!waiting_socket_);
+    const QueuedResponse& response = queue_.front();
+    ds->Send(response.status, true, response.content_type,
+             response.extra_headers, response.data);
+    queue_.pop();
+  } else {
+    waiting_socket_ = ds;
+  }
+}
+
+//
+// PeerChannel
+//
+
+// static
+bool PeerChannel::IsPeerConnection(const DataSocket* ds) {
+  RTC_DCHECK(ds);
+  return (ds->method() == DataSocket::POST && ds->content_length() > 0) ||
+         (ds->method() == DataSocket::GET && ds->PathEquals("/sign_in"));
+}
+
+// Finds the member whose id matches the request's "peer_id=" argument for the
+// /wait, /sign_out and /message paths. As a side effect, /wait parks (or
+// immediately answers) the member's hanging GET and /sign_out marks the
+// member disconnected.
+ChannelMember* PeerChannel::Lookup(DataSocket* ds) const {
+  RTC_DCHECK(ds);
+
+  if (ds->method() != DataSocket::GET && ds->method() != DataSocket::POST)
+    return NULL;
+
+  size_t i = 0;
+  for (; i < ARRAYSIZE(kRequestPaths); ++i) {
+    if (ds->PathEquals(kRequestPaths[i]))
+      break;
+  }
+
+  if (i == ARRAYSIZE(kRequestPaths))
+    return NULL;
+
+  std::string args(ds->request_arguments());
+  static const char kPeerId[] = "peer_id=";
+  size_t found = args.find(kPeerId);
+  if (found == std::string::npos)
+    return NULL;
+
+  // ARRAYSIZE counts the terminating NUL, hence the -1 to land just past '='.
+  int id = atoi(&args[found + ARRAYSIZE(kPeerId) - 1]);
+  Members::const_iterator iter = members_.begin();
+  for (; iter != members_.end(); ++iter) {
+    if (id == (*iter)->id()) {
+      if (i == kWait)
+        (*iter)->SetWaitingSocket(ds);
+      if (i == kSignOut)
+        (*iter)->set_disconnected();
+      return *iter;
+    }
+  }
+
+  return NULL;
+}
+
+// Scans the request path's query string for a "to=<id>" parameter (accepting
+// it only at the start of the query or right after '&') and returns the
+// member with that id, or NULL.
+ChannelMember* PeerChannel::IsTargetedRequest(const DataSocket* ds) const {
+  RTC_DCHECK(ds);
+  // Regardless of GET or POST, we look for the peer_id parameter
+  // only in the request_path.
+  const std::string& path = ds->request_path();
+  size_t args = path.find('?');
+  if (args == std::string::npos)
+    return NULL;
+  size_t found;
+  const char kTargetPeerIdParam[] = "to=";
+  do {
+    found = path.find(kTargetPeerIdParam, args);
+    if (found == std::string::npos)
+      return NULL;
+    if (found == (args + 1) || path[found - 1] == '&') {
+      found += ARRAYSIZE(kTargetPeerIdParam) - 1;
+      break;
+    }
+    args = found + ARRAYSIZE(kTargetPeerIdParam) - 1;
+  } while (true);
+  int id = atoi(&path[found]);
+  Members::const_iterator i = members_.begin();
+  for (; i != members_.end(); ++i) {
+    if ((*i)->id() == id) {
+      return *i;
+    }
+  }
+  return NULL;
+}
+
+// Registers a new member for a "/sign_in" request: notifies existing members
+// of the newcomer, then replies to `ds` with the full member list (the new
+// peer's own entry first).
+bool PeerChannel::AddMember(DataSocket* ds) {
+  RTC_DCHECK(IsPeerConnection(ds));
+  ChannelMember* new_guy = new ChannelMember(ds);
+  Members failures;
+  BroadcastChangedState(*new_guy, &failures);
+  HandleDeliveryFailures(&failures);
+  members_.push_back(new_guy);
+
+  printf("New member added (total=%s): %s\n",
+         size_t2str(members_.size()).c_str(), new_guy->name().c_str());
+
+  // Let the newly connected peer know about other members of the channel.
+  std::string content_type;
+  std::string response = BuildResponseForNewMember(*new_guy, &content_type);
+  ds->Send("200 Added", true, content_type, new_guy->GetPeerIdHeader(),
+           response);
+  return true;
+}
+
+// Queues a shutdown notice for every member, then deletes them all.
+void PeerChannel::CloseAll() {
+  Members::const_iterator i = members_.begin();
+  for (; i != members_.end(); ++i) {
+    (*i)->QueueResponse("200 OK", "text/plain", "", "Server shutting down");
+  }
+  DeleteAll();
+}
+
+// Forwards the close to every member and removes those that have signed out.
+// NOTE(review): after erase() the loop's ++i skips the element following the
+// removed member until the next call — pre-existing quirk, confirm intended.
+void PeerChannel::OnClosing(DataSocket* ds) {
+  for (Members::iterator i = members_.begin(); i != members_.end(); ++i) {
+    ChannelMember* m = (*i);
+    m->OnClosing(ds);
+    if (!m->connected()) {
+      i = members_.erase(i);
+      Members failures;
+      BroadcastChangedState(*m, &failures);
+      HandleDeliveryFailures(&failures);
+      delete m;
+      if (i == members_.end())
+        break;
+    }
+  }
+  printf("Total connected: %s\n", size_t2str(members_.size()).c_str());
+}
+
+// Evicts members whose /wait connection has been absent for too long (see
+// ChannelMember::TimedOut) and broadcasts their departure.
+void PeerChannel::CheckForTimeout() {
+  for (Members::iterator i = members_.begin(); i != members_.end(); ++i) {
+    ChannelMember* m = (*i);
+    if (m->TimedOut()) {
+      printf("Timeout: %s\n", m->name().c_str());
+      m->set_disconnected();
+      i = members_.erase(i);
+      Members failures;
+      BroadcastChangedState(*m, &failures);
+      HandleDeliveryFailures(&failures);
+      delete m;
+      if (i == members_.end())
+        break;
+    }
+  }
+}
+
+void PeerChannel::DeleteAll() {
+  for (Members::iterator i = members_.begin(); i != members_.end(); ++i)
+    delete (*i);
+  members_.clear();
+}
+
+// Tells every other member about `member`'s state change. Members whose
+// notification could not be delivered are moved to `delivery_failures` and
+// removed from the channel.
+void PeerChannel::BroadcastChangedState(const ChannelMember& member,
+                                        Members* delivery_failures) {
+  // This function should be called prior to DataSocket::Close().
+  RTC_DCHECK(delivery_failures);
+
+  if (!member.connected()) {
+    printf("Member disconnected: %s\n", member.name().c_str());
+  }
+
+  Members::iterator i = members_.begin();
+  for (; i != members_.end(); ++i) {
+    if (&member != (*i)) {
+      if (!(*i)->NotifyOfOtherMember(member)) {
+        (*i)->set_disconnected();
+        delivery_failures->push_back(*i);
+        i = members_.erase(i);
+        if (i == members_.end())
+          break;
+      }
+    }
+  }
+}
+
+// Drains `failures`, broadcasting each failed member's disconnect (which may
+// in turn append new failures to the same list) before deleting it.
+void PeerChannel::HandleDeliveryFailures(Members* failures) {
+  RTC_DCHECK(failures);
+
+  while (!failures->empty()) {
+    Members::iterator i = failures->begin();
+    ChannelMember* member = *i;
+    RTC_DCHECK(!member->connected());
+    failures->erase(i);
+    BroadcastChangedState(*member, failures);
+    delete member;
+  }
+}
+
+// Builds a simple list of "name,id,connected\n" entries for each member.
+std::string PeerChannel::BuildResponseForNewMember(const ChannelMember& member,
+                                                   std::string* content_type) {
+  RTC_DCHECK(content_type);
+
+  *content_type = "text/plain";
+  // The peer itself will always be the first entry.
+  std::string response(member.GetEntry());
+  for (Members::iterator i = members_.begin(); i != members_.end(); ++i) {
+    if (member.id() != (*i)->id()) {
+      RTC_DCHECK((*i)->connected());
+      response += (*i)->GetEntry();
+    }
+  }
+
+  return response;
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/server/peer_channel.h b/third_party/libwebrtc/examples/peerconnection/server/peer_channel.h
new file mode 100644
index 0000000000..c3624908ac
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/peer_channel.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#define EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+
+#include <time.h>
+
+#include <queue>
+#include <string>
+#include <vector>
+
+class DataSocket;
+
+// Represents a single peer connected to the server.
+class ChannelMember {
+ public:
+  explicit ChannelMember(DataSocket* socket);
+  ~ChannelMember();
+
+  bool connected() const { return connected_; }
+  int id() const { return id_; }
+  void set_disconnected() { connected_ = false; }
+  bool is_wait_request(DataSocket* ds) const;
+  const std::string& name() const { return name_; }
+
+  // True when the member has been without a hanging GET for too long.
+  bool TimedOut();
+
+  // Returns the "Pragma: <id>\r\n" header used to tag responses.
+  std::string GetPeerIdHeader() const;
+
+  // Queues a presence notification about `other` for this member.
+  bool NotifyOfOtherMember(const ChannelMember& other);
+
+  // Returns a string in the form "name,id,connected\n".
+  std::string GetEntry() const;
+
+  void ForwardRequestToPeer(DataSocket* ds, ChannelMember* peer);
+
+  void OnClosing(DataSocket* ds);
+
+  // Delivers a response on the parked /wait socket, or stores it for later.
+  void QueueResponse(const std::string& status,
+                     const std::string& content_type,
+                     const std::string& extra_headers,
+                     const std::string& data);
+
+  void SetWaitingSocket(DataSocket* ds);
+
+ protected:
+  struct QueuedResponse {
+    std::string status, content_type, extra_headers, data;
+  };
+
+  DataSocket* waiting_socket_;  // Parked hanging-GET socket, may be NULL.
+  int id_;
+  bool connected_;
+  time_t timestamp_;  // Last time the waiting socket went away.
+  std::string name_;
+  std::queue<QueuedResponse> queue_;  // Responses pending a /wait request.
+  static int s_member_id_;  // Id source shared by all members.
+};
+
+// Manages all currently connected peers.
+class PeerChannel {
+ public:
+  typedef std::vector<ChannelMember*> Members;
+
+  PeerChannel() {}
+
+  ~PeerChannel() { DeleteAll(); }
+
+  const Members& members() const { return members_; }
+
+  // Returns true if the request should be treated as a new ChannelMember
+  // request. Otherwise the request is not peerconnection related.
+  static bool IsPeerConnection(const DataSocket* ds);
+
+  // Finds a connected peer that's associated with the `ds` socket.
+  ChannelMember* Lookup(DataSocket* ds) const;
+
+  // Checks if the request has a "to=" parameter and if so, looks up the
+  // peer for which the request is targeted at.
+  ChannelMember* IsTargetedRequest(const DataSocket* ds) const;
+
+  // Adds a new ChannelMember instance to the list of connected peers and
+  // associates it with the socket.
+  bool AddMember(DataSocket* ds);
+
+  // Closes all connections and sends a "shutting down" message to all
+  // connected peers.
+  void CloseAll();
+
+  // Called when a socket was determined to be closing by the peer (or if the
+  // connection went dead).
+  void OnClosing(DataSocket* ds);
+
+  // Evicts members whose hanging GET has been absent for too long.
+  void CheckForTimeout();
+
+ protected:
+  void DeleteAll();
+  // Notifies all other members of `member`'s state change; undeliverable
+  // members are collected into `delivery_failures`.
+  void BroadcastChangedState(const ChannelMember& member,
+                             Members* delivery_failures);
+  void HandleDeliveryFailures(Members* failures);
+
+  // Builds a simple list of "name,id,connected\n" entries for each member.
+  std::string BuildResponseForNewMember(const ChannelMember& member,
+                                        std::string* content_type);
+
+ protected:
+  Members members_;
+};
+
+#endif // EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
diff --git a/third_party/libwebrtc/examples/peerconnection/server/server_test.html b/third_party/libwebrtc/examples/peerconnection/server/server_test.html
new file mode 100644
index 0000000000..0a165f19d5
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/server_test.html
@@ -0,0 +1,237 @@
+<html>
+<head>
+<title>PeerConnection server test page</title>
+
+<script>
+var request = null;
+var hangingGet = null;
+var localName;
+var server;
+var my_id = -1;
+var other_peers = {};
+var message_counter = 0;
+
+// Appends a line of text to the on-page debug log.
+function trace(txt) {
+  var elem = document.getElementById("debug");
+  elem.innerHTML += txt + "<br>";
+}
+
+// Handles a "name,id,connected" entry pushed by the server about another
+// peer; records the peer in other_peers when its connected flag is non-zero.
+function handleServerNotification(data) {
+  trace("Server notification: " + data);
+  var parsed = data.split(',');
+  if (parseInt(parsed[2]) != 0)
+    other_peers[parseInt(parsed[1])] = parsed[0];
+}
+
+// Displays a message received from `peer_id` as a collapsible log entry.
+// In loopback mode the message is bounced back to the sender (with "offer"
+// rewritten so the sender treats it as an answer).
+function handlePeerMessage(peer_id, data) {
+  ++message_counter;
+  var str = "Message from '" + other_peers[peer_id] + "'&nbsp;";
+  str += "<span id='toggle_" + message_counter + "' onclick='toggleMe(this);' ";
+  str += "style='cursor: pointer'>+</span><br>";
+  str += "<blockquote id='msg_" + message_counter + "' style='display:none'>";
+  str += data + "</blockquote>";
+  trace(str);
+  if (document.getElementById("loopback").checked) {
+    if (data.search("offer") != -1) {
+      // In loopback mode, if DTLS is enabled, notify the client to disable it.
+      // Otherwise replace the offer with an answer.
+      if (data.search("fingerprint") != -1)
+        data = data.replace("offer", "offer-loopback");
+      else
+        data = data.replace("offer", "answer");
+    }
+    sendToPeer(peer_id, data);
+  }
+}
+
+// Reads an integer response header from XMLHttpRequest `r`; -1 when absent.
+function GetIntHeader(r, name) {
+  var val = r.getResponseHeader(name);
+  return val != null && val.length ? parseInt(val) : -1;
+}
+
+// Completion handler for the hanging GET (/wait). The "Pragma" header holds
+// the originating peer's id: our own id means a server notification, any
+// other id means a peer message. On success the hanging GET is re-issued.
+function hangingGetCallback() {
+  try {
+    if (hangingGet.readyState != 4)
+      return;
+    if (hangingGet.status != 200) {
+      trace("server error: " + hangingGet.statusText);
+      disconnect();
+    } else {
+      var peer_id = GetIntHeader(hangingGet, "Pragma");
+      if (peer_id == my_id) {
+        handleServerNotification(hangingGet.responseText);
+      } else {
+        handlePeerMessage(peer_id, hangingGet.responseText);
+      }
+    }
+
+    if (hangingGet) {
+      hangingGet.abort();
+      hangingGet = null;
+    }
+
+    if (my_id != -1)
+      window.setTimeout(startHangingGet, 0);
+  } catch (e) {
+    trace("Hanging get error: " + e.description);
+  }
+}
+
+// Issues the long-poll GET against "/wait" for our peer id.
+function startHangingGet() {
+  try {
+    hangingGet = new XMLHttpRequest();
+    hangingGet.onreadystatechange = hangingGetCallback;
+    hangingGet.ontimeout = onHangingGetTimeout;
+    hangingGet.open("GET", server + "/wait?peer_id=" + my_id, true);
+    hangingGet.send();
+  } catch (e) {
+    trace("error" + e.description);
+  }
+}
+
+// Re-issues the hanging GET after a client-side timeout.
+function onHangingGetTimeout() {
+  trace("hanging get timeout. issuing again.");
+  hangingGet.abort();
+  hangingGet = null;
+  if (my_id != -1)
+    window.setTimeout(startHangingGet, 0);
+}
+
+// Completion handler for "/sign_in". The response body is a newline-separated
+// peer list; the first line is our own "name,id,connected" entry.
+function signInCallback() {
+  try {
+    if (request.readyState == 4) {
+      if (request.status == 200) {
+        var peers = request.responseText.split("\n");
+        my_id = parseInt(peers[0].split(',')[1]);
+        trace("My id: " + my_id);
+        for (var i = 1; i < peers.length; ++i) {
+          if (peers[i].length > 0) {
+            trace("Peer " + i + ": " + peers[i]);
+            var parsed = peers[i].split(',');
+            other_peers[parseInt(parsed[1])] = parsed[0];
+          }
+        }
+        startHangingGet();
+        request = null;
+      }
+    }
+  } catch (e) {
+    trace("error: " + e.description);
+  }
+}
+
+// Registers this client with the server using the chosen name.
+function signIn() {
+  try {
+    request = new XMLHttpRequest();
+    request.onreadystatechange = signInCallback;
+    request.open("GET", server + "/sign_in?" + localName, true);
+    request.send();
+  } catch (e) {
+    trace("error: " + e.description);
+  }
+}
+
+// POSTs `data` to the server's "/message" endpoint, addressed to `peer_id`.
+// Uses a synchronous request so the caller blocks until delivery completes.
+function sendToPeer(peer_id, data) {
+  if (my_id == -1) {
+    alert("Not connected");
+    return;
+  }
+  if (peer_id == my_id) {
+    alert("Can't send a message to oneself :)");
+    return;
+  }
+  var r = new XMLHttpRequest();
+  r.open("POST", server + "/message?peer_id=" + my_id + "&to=" + peer_id,
+         false);
+  r.setRequestHeader("Content-Type", "text/plain");
+  r.send(data);
+  r = null;
+}
+
+// Reads the name/server fields, toggles the buttons and signs in.
+function connect() {
+  localName = document.getElementById("local").value.toLowerCase();
+  server = document.getElementById("server").value.toLowerCase();
+  if (localName.length == 0) {
+    alert("I need a name please.");
+    document.getElementById("local").focus();
+  } else {
+    document.getElementById("connect").disabled = true;
+    document.getElementById("disconnect").disabled = false;
+    document.getElementById("send").disabled = false;
+    signIn();
+  }
+}
+
+// Aborts any in-flight requests, signs out of the server (synchronously, so
+// it also works from onbeforeunload) and resets the UI.
+function disconnect() {
+  if (request) {
+    request.abort();
+    request = null;
+  }
+
+  if (hangingGet) {
+    hangingGet.abort();
+    hangingGet = null;
+  }
+
+  if (my_id != -1) {
+    request = new XMLHttpRequest();
+    request.open("GET", server + "/sign_out?peer_id=" + my_id, false);
+    request.send();
+    request = null;
+    my_id = -1;
+  }
+
+  document.getElementById("connect").disabled = false;
+  document.getElementById("disconnect").disabled = true;
+  document.getElementById("send").disabled = true;
+}
+
+window.onbeforeunload = disconnect;
+
+// Sends the typed message to the peer id entered in the form.
+function send() {
+  var text = document.getElementById("message").value;
+  var peer_id = parseInt(document.getElementById("peer_id").value);
+  if (!text.length || peer_id == 0) {
+    alert("No text supplied or invalid peer id");
+  } else {
+    sendToPeer(peer_id, text);
+  }
+}
+
+// Expands/collapses the message blockquote tied to the clicked +/- toggle.
+function toggleMe(obj) {
+  var id = obj.id.replace("toggle", "msg");
+  var t = document.getElementById(id);
+  if (obj.innerText == "+") {
+    obj.innerText = "-";
+    t.style.display = "block";
+  } else {
+    obj.innerText = "+";
+    t.style.display = "none";
+  }
+}
+
+</script>
+
+</head>
+<body>
+Server: <input type="text" id="server" value="http://localhost:8888" /><br>
+<input type="checkbox" id="loopback" checked="checked"/> Loopback (just send
+received messages right back)<br>
+Your name: <input type="text" id="local" value="my_name"/>
+<button id="connect" onclick="connect();">Connect</button>
+<button disabled="true" id="disconnect"
+ onclick="disconnect();">Disconnect</button>
+<br>
+<table><tr><td>
+Target peer id: <input type="text" id="peer_id" size="3"/></td><td>
+Message: <input type="text" id="message"/></td><td>
+<button disabled="true" id="send" onclick="send();">Send</button>
+</td></tr></table>
+<button onclick="document.getElementById('debug').innerHTML='';">
+Clear log</button>
+
+<pre id="debug">
+</pre>
+<br><hr>
+</body>
+</html>
diff --git a/third_party/libwebrtc/examples/peerconnection/server/utils.cc b/third_party/libwebrtc/examples/peerconnection/server/utils.cc
new file mode 100644
index 0000000000..5e61e601d9
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/utils.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/peerconnection/server/utils.h"
+
+#include <stdio.h>
+
+#include "rtc_base/string_encode.h"
+
+using rtc::ToString;
+
+// Converts `i` to its base-10 string representation.
+std::string int2str(int i) {
+  return ToString(i);
+}
+
+// Converts `i` to its base-10 string representation.
+std::string size_t2str(size_t i) {
+  return ToString(i);
+}
diff --git a/third_party/libwebrtc/examples/peerconnection/server/utils.h b/third_party/libwebrtc/examples/peerconnection/server/utils.h
new file mode 100644
index 0000000000..85c04a40e9
--- /dev/null
+++ b/third_party/libwebrtc/examples/peerconnection/server/utils.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#define EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+
+#include <stddef.h>
+
+#include <string>
+
+#ifndef ARRAYSIZE
+#define ARRAYSIZE(x) (sizeof(x) / sizeof(x[0]))
+#endif
+
+std::string int2str(int i);
+std::string size_t2str(size_t i);
+
+#endif // EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
diff --git a/third_party/libwebrtc/examples/stunprober/main.cc b/third_party/libwebrtc/examples/stunprober/main.cc
new file mode 100644
index 0000000000..3b3c06be8f
--- /dev/null
+++ b/third_party/libwebrtc/examples/stunprober/main.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <set>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "p2p/base/basic_packet_socket_factory.h"
+#include "p2p/stunprober/stun_prober.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_adapter.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+#include "test/scoped_key_value_config.h"
+
+using stunprober::AsyncCallback;
+using stunprober::StunProber;
+
+// Command-line options controlling the probe schedule and target servers.
+ABSL_FLAG(int,
+          interval,
+          10,
+          "Interval of consecutive stun pings in milliseconds");
+ABSL_FLAG(bool,
+          shared_socket,
+          false,
+          "Share socket mode for different remote IPs");
+ABSL_FLAG(int,
+          pings_per_ip,
+          10,
+          "Number of consecutive stun pings to send for each IP");
+ABSL_FLAG(int,
+          timeout,
+          1000,
+          "Milliseconds of wait after the last ping sent before exiting");
+ABSL_FLAG(
+    std::string,
+    servers,
+    "stun.l.google.com:19302,stun1.l.google.com:19302,stun2.l.google.com:19302",
+    "Comma separated STUN server addresses with ports");
+
+namespace {
+
+// Maps a stunprober NAT classification to a human-readable label.
+const char* PrintNatType(stunprober::NatType type) {
+  switch (type) {
+    case stunprober::NATTYPE_NONE:
+      return "Not behind a NAT";
+    case stunprober::NATTYPE_UNKNOWN:
+      return "Unknown NAT type";
+    case stunprober::NATTYPE_SYMMETRIC:
+      return "Symmetric NAT";
+    case stunprober::NATTYPE_NON_SYMMETRIC:
+      return "Non-Symmetric NAT";
+    default:
+      return "Invalid";
+  }
+}
+
+// Logs a human-readable summary of the prober's collected statistics. When
+// the probe gathered too little data, GetStats() fails and only a warning is
+// emitted.
+void PrintStats(StunProber* prober) {
+  StunProber::Stats stats;
+  if (!prober->GetStats(&stats)) {
+    RTC_LOG(LS_WARNING) << "Results are inconclusive.";
+    return;
+  }
+
+  RTC_LOG(LS_INFO) << "Shared Socket Mode: " << stats.shared_socket_mode;
+  RTC_LOG(LS_INFO) << "Requests sent: " << stats.num_request_sent;
+  RTC_LOG(LS_INFO) << "Responses received: " << stats.num_response_received;
+  RTC_LOG(LS_INFO) << "Target interval (ns): "
+                   << stats.target_request_interval_ns;
+  RTC_LOG(LS_INFO) << "Actual interval (ns): "
+                   << stats.actual_request_interval_ns;
+  RTC_LOG(LS_INFO) << "NAT Type: " << PrintNatType(stats.nat_type);
+  RTC_LOG(LS_INFO) << "Host IP: " << stats.host_ip;
+  RTC_LOG(LS_INFO) << "Server-reflexive ips: ";
+  for (auto& ip : stats.srflx_addrs) {
+    RTC_LOG(LS_INFO) << "\t" << ip;
+  }
+
+  // Fixed log-message typos: "Precent" -> "Percent" and the missing space
+  // after "Latency:".
+  RTC_LOG(LS_INFO) << "Success Percent: " << stats.success_percent;
+  RTC_LOG(LS_INFO) << "Response Latency: " << stats.average_rtt_ms;
+}
+
+// Completion callback for the probe: stops the worker thread's event loop
+// and, on success, prints the collected statistics.
+void StopTrial(rtc::Thread* thread, StunProber* prober, int result) {
+  thread->Quit();
+  if (prober) {
+    RTC_LOG(LS_INFO) << "Result: " << result;
+    if (result == StunProber::SUCCESS) {
+      PrintStats(prober);
+    }
+  }
+}
+
+} // namespace
+
+// Parses the comma-separated --servers flag, sets up the socket/thread
+// infrastructure, runs the STUN probe and blocks until StopTrial quits the
+// thread.
+int main(int argc, char* argv[]) {
+  absl::ParseCommandLine(argc, argv);
+
+  std::vector<rtc::SocketAddress> server_addresses;
+  std::istringstream servers(absl::GetFlag(FLAGS_servers));
+  std::string server;
+  while (getline(servers, server, ',')) {
+    rtc::SocketAddress addr;
+    if (!addr.FromString(server)) {
+      RTC_LOG(LS_ERROR) << "Parsing " << server << " failed.";
+      return -1;
+    }
+    server_addresses.push_back(addr);
+  }
+
+  rtc::InitializeSSL();
+  rtc::InitRandom(rtc::Time32());
+  webrtc::test::ScopedKeyValueConfig field_trials;
+  rtc::PhysicalSocketServer socket_server;
+  rtc::AutoSocketServerThread thread(&socket_server);
+  auto socket_factory =
+      std::make_unique<rtc::BasicPacketSocketFactory>(&socket_server);
+  std::unique_ptr<rtc::BasicNetworkManager> network_manager(
+      new rtc::BasicNetworkManager(&socket_server, &field_trials));
+  std::vector<const rtc::Network*> networks = network_manager->GetNetworks();
+  auto prober = std::make_unique<StunProber>(socket_factory.get(),
+                                             rtc::Thread::Current(), networks);
+  // The callback fires on the same thread, so capturing `thread` by
+  // reference is safe here.
+  auto finish_callback = [&thread](StunProber* prober, int result) {
+    StopTrial(&thread, prober, result);
+  };
+  prober->Start(server_addresses, absl::GetFlag(FLAGS_shared_socket),
+                absl::GetFlag(FLAGS_interval),
+                absl::GetFlag(FLAGS_pings_per_ip), absl::GetFlag(FLAGS_timeout),
+                AsyncCallback(finish_callback));
+  thread.Run();
+  return 0;
+}
diff --git a/third_party/libwebrtc/examples/stunserver/stunserver_main.cc b/third_party/libwebrtc/examples/stunserver/stunserver_main.cc
new file mode 100644
index 0000000000..8180069bf0
--- /dev/null
+++ b/third_party/libwebrtc/examples/stunserver/stunserver_main.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <iostream>
+
+#include "p2p/base/stun_server.h"
+#include "rtc_base/async_udp_socket.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/socket_server.h"
+#include "rtc_base/thread.h"
+
+using cricket::StunServer;
+
+int main(int argc, char* argv[]) {
+ if (argc != 2) {
+ std::cerr << "usage: stunserver address" << std::endl;
+ return 1;
+ }
+
+ rtc::SocketAddress server_addr;
+ if (!server_addr.FromString(argv[1])) {
+ std::cerr << "Unable to parse IP address: " << argv[1];
+ return 1;
+ }
+
+ rtc::Thread* pthMain = rtc::Thread::Current();
+
+ rtc::AsyncUDPSocket* server_socket =
+ rtc::AsyncUDPSocket::Create(pthMain->socketserver(), server_addr);
+ if (!server_socket) {
+ std::cerr << "Failed to create a UDP socket" << std::endl;
+ return 1;
+ }
+
+ StunServer* server = new StunServer(server_socket);
+
+ std::cout << "Listening at " << server_addr.ToString() << std::endl;
+
+ pthMain->Run();
+
+ delete server;
+ return 0;
+}
diff --git a/third_party/libwebrtc/examples/turnserver/read_auth_file.cc b/third_party/libwebrtc/examples/turnserver/read_auth_file.cc
new file mode 100644
index 0000000000..4b0b21b8ae
--- /dev/null
+++ b/third_party/libwebrtc/examples/turnserver/read_auth_file.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/turnserver/read_auth_file.h"
+
+#include <stddef.h>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "rtc_base/string_encode.h"
+
+namespace webrtc_examples {
+
+std::map<std::string, std::string> ReadAuthFile(std::istream* s) {
+ std::map<std::string, std::string> name_to_key;
+ for (std::string line; std::getline(*s, line);) {
+ const size_t sep = line.find('=');
+ if (sep == std::string::npos)
+ continue;
+ char buf[32];
+ size_t len = rtc::hex_decode(rtc::ArrayView<char>(buf),
+ absl::string_view(line).substr(sep + 1));
+ if (len > 0) {
+ name_to_key.emplace(line.substr(0, sep), std::string(buf, len));
+ }
+ }
+ return name_to_key;
+}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/turnserver/read_auth_file.h b/third_party/libwebrtc/examples/turnserver/read_auth_file.h
new file mode 100644
index 0000000000..1c139c9924
--- /dev/null
+++ b/third_party/libwebrtc/examples/turnserver/read_auth_file.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_TURNSERVER_READ_AUTH_FILE_H_
+#define EXAMPLES_TURNSERVER_READ_AUTH_FILE_H_
+
+#include <istream>
+#include <map>
+#include <string>
+
+namespace webrtc_examples {
+
+std::map<std::string, std::string> ReadAuthFile(std::istream* s);
+
+} // namespace webrtc_examples
+
+#endif // EXAMPLES_TURNSERVER_READ_AUTH_FILE_H_
diff --git a/third_party/libwebrtc/examples/turnserver/read_auth_file_unittest.cc b/third_party/libwebrtc/examples/turnserver/read_auth_file_unittest.cc
new file mode 100644
index 0000000000..23b026429b
--- /dev/null
+++ b/third_party/libwebrtc/examples/turnserver/read_auth_file_unittest.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/turnserver/read_auth_file.h"
+
+#include <sstream>
+
+#include "test/gtest.h"
+
+namespace webrtc_examples {
+
+TEST(ReadAuthFile, HandlesEmptyFile) {
+ std::istringstream empty;
+ auto map = ReadAuthFile(&empty);
+ EXPECT_TRUE(map.empty());
+}
+
+TEST(ReadAuthFile, RecognizesValidUser) {
+ std::istringstream file("foo=deadbeaf\n");
+ auto map = ReadAuthFile(&file);
+ ASSERT_NE(map.find("foo"), map.end());
+ EXPECT_EQ(map["foo"], "\xde\xad\xbe\xaf");
+}
+
+TEST(ReadAuthFile, EmptyValueForInvalidHex) {
+ std::istringstream file(
+ "foo=deadbeaf\n"
+ "bar=xxxxinvalidhex\n"
+ "baz=cafe\n");
+ auto map = ReadAuthFile(&file);
+ ASSERT_NE(map.find("foo"), map.end());
+ EXPECT_EQ(map["foo"], "\xde\xad\xbe\xaf");
+ EXPECT_EQ(map.find("bar"), map.end());
+ ASSERT_NE(map.find("baz"), map.end());
+ EXPECT_EQ(map["baz"], "\xca\xfe");
+}
+
+} // namespace webrtc_examples
diff --git a/third_party/libwebrtc/examples/turnserver/turnserver_main.cc b/third_party/libwebrtc/examples/turnserver/turnserver_main.cc
new file mode 100644
index 0000000000..8db6162306
--- /dev/null
+++ b/third_party/libwebrtc/examples/turnserver/turnserver_main.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <fstream>
+#include <iostream>
+#include <map>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "examples/turnserver/read_auth_file.h"
+#include "p2p/base/basic_packet_socket_factory.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/turn_server.h"
+#include "rtc_base/async_udp_socket.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/thread.h"
+
+namespace {
+const char kSoftware[] = "libjingle TurnServer";
+
+class TurnFileAuth : public cricket::TurnAuthInterface {
+ public:
+ explicit TurnFileAuth(std::map<std::string, std::string> name_to_key)
+ : name_to_key_(std::move(name_to_key)) {}
+
+ virtual bool GetKey(absl::string_view username,
+ absl::string_view realm,
+ std::string* key) {
+ // File is stored as lines of <username>=<HA1>.
+ // Generate HA1 via "echo -n "<username>:<realm>:<password>" | md5sum"
+ auto it = name_to_key_.find(std::string(username));
+ if (it == name_to_key_.end())
+ return false;
+ *key = it->second;
+ return true;
+ }
+
+ private:
+ const std::map<std::string, std::string> name_to_key_;
+};
+
+} // namespace
+
+int main(int argc, char* argv[]) {
+ if (argc != 5) {
+ std::cerr << "usage: turnserver int-addr ext-ip realm auth-file"
+ << std::endl;
+ return 1;
+ }
+
+ rtc::SocketAddress int_addr;
+ if (!int_addr.FromString(argv[1])) {
+ std::cerr << "Unable to parse IP address: " << argv[1] << std::endl;
+ return 1;
+ }
+
+ rtc::IPAddress ext_addr;
+ if (!IPFromString(argv[2], &ext_addr)) {
+ std::cerr << "Unable to parse IP address: " << argv[2] << std::endl;
+ return 1;
+ }
+
+ rtc::PhysicalSocketServer socket_server;
+ rtc::AutoSocketServerThread main(&socket_server);
+ rtc::AsyncUDPSocket* int_socket =
+ rtc::AsyncUDPSocket::Create(&socket_server, int_addr);
+ if (!int_socket) {
+    std::cerr << "Failed to create a UDP socket bound at " << int_addr.ToString()
+ << std::endl;
+ return 1;
+ }
+
+ cricket::TurnServer server(&main);
+ std::fstream auth_file(argv[4], std::fstream::in);
+
+ TurnFileAuth auth(auth_file.is_open()
+ ? webrtc_examples::ReadAuthFile(&auth_file)
+ : std::map<std::string, std::string>());
+ server.set_realm(argv[3]);
+ server.set_software(kSoftware);
+ server.set_auth_hook(&auth);
+ server.AddInternalSocket(int_socket, cricket::PROTO_UDP);
+ server.SetExternalSocketFactory(
+ new rtc::BasicPacketSocketFactory(&socket_server),
+ rtc::SocketAddress(ext_addr, 0));
+
+ std::cout << "Listening internally at " << int_addr.ToString() << std::endl;
+
+ main.Run();
+ return 0;
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION b/third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION
new file mode 100644
index 0000000000..d5f7399bca
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION
@@ -0,0 +1,33 @@
+Instruction of running webrtc_unity_plugin on Android Unity
+
+1. On Linux machine, compile target webrtc_unity_plugin.
+ Checkout WebRTC codebase: fetch --nohooks webrtc_android
+   If you already have a checkout for linux, add target_os="android" into .gclient file.
+ Run gclient sync
+   Run gn args out/Android, and again set target_os="android" in the args.gn
+ Run ninja -C out/Android webrtc_unity_plugin
+
+2. On Linux machine, build target libwebrtc_unity under webrtc checkout. This is the java code for webrtc to work on Android.
+
+3. Copy libwebrtc_unity.jar and libwebrtc_unity_plugin.so into Unity project folder, under Assets/Plugins/Android folder.
+
+4. Rename libwebrtc_unity_plugin.so to libjingle_peerconnection_so.so. This is hacky, and the purpose is to let the java code in libwebrtc_unity.jar to find their JNI implementations. Simultaneously, in your C# wrapper script for the native plugin libjingle_peerconnection_so.so, the dll_path should be set to "jingle_peerconnection_so".
+
+5. In the Unity Main Scene’s Start method, write the following code to initialize the Java environment for webrtc (otherwise, webrtc will not be able to access audio device or camera from C++ code):
+
+#if UNITY_ANDROID
+ AndroidJavaClass playerClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
+ AndroidJavaObject activity = playerClass.GetStatic<AndroidJavaObject>("currentActivity");
+ AndroidJavaClass utilityClass = new AndroidJavaClass("org.webrtc.UnityUtility");
+ utilityClass.CallStatic("InitializePeerConncectionFactory", new object[1] { activity });
+#endif
+
+6. Compile the unity project into an APK, and decompile the apk using apktool that you can download from https://ibotpeaches.github.io/Apktool/
+ Run apktool d apkname.apk.
+Then copy the AndroidManifest.xml in the decompiled folder to the Assets/Plugins/Android folder, and add two lines:
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.CAMERA" />
+
+The purpose of using apktool is to get a well-written android manifest xml file. If you know how to write manifest file from scratch, you can skip using apktool.
+
+7. Compile the unity project into an APK again and deploy it to an android device.
diff --git a/third_party/libwebrtc/examples/unityplugin/DEPS b/third_party/libwebrtc/examples/unityplugin/DEPS
new file mode 100644
index 0000000000..604005ac73
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+modules/utility",
+ "+sdk",
+]
diff --git a/third_party/libwebrtc/examples/unityplugin/README b/third_party/libwebrtc/examples/unityplugin/README
new file mode 100644
index 0000000000..da8f07aa11
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/README
@@ -0,0 +1,309 @@
+This directory contains an example Unity native plugin for Windows OS and Android.
+
+The APIs use Platform Invoke (P/Invoke) technology as required by Unity native plugin.
+This plugin dll can also be used by Windows C# applications other than Unity.
+
+For detailed build instruction on Android, see ANDROID_INSTRUCTION
+
+An example of wrapping native plugin into a C# managed class in Unity is given as following:
+
+using System;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+
+namespace SimplePeerConnectionM {
+ // A class for ice candidate.
+ public class IceCandidate {
+ public IceCandidate(string candidate, int sdpMlineIndex, string sdpMid) {
+ mCandidate = candidate;
+ mSdpMlineIndex = sdpMlineIndex;
+ mSdpMid = sdpMid;
+ }
+ string mCandidate;
+ int mSdpMlineIndex;
+ string mSdpMid;
+
+ public string Candidate {
+ get { return mCandidate; }
+ set { mCandidate = value; }
+ }
+
+ public int SdpMlineIndex {
+ get { return mSdpMlineIndex; }
+ set { mSdpMlineIndex = value; }
+ }
+
+ public string SdpMid {
+ get { return mSdpMid; }
+ set { mSdpMid = value; }
+ }
+ }
+
+ // A managed wrapper up class for the native c style peer connection APIs.
+ public class PeerConnectionM {
+ private const string dllPath = "webrtc_unity_plugin";
+
+ //create a peerconnection with turn servers
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern int CreatePeerConnection(string[] turnUrls, int noOfUrls,
+ string username, string credential);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool ClosePeerConnection(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool AddStream(int peerConnectionId, bool audioOnly);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool AddDataChannel(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool CreateOffer(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool CreateAnswer(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool SendDataViaDataChannel(int peerConnectionId, string data);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool SetAudioControl(int peerConnectionId, bool isMute, bool isRecord);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void LocalDataChannelReadyInternalDelegate();
+ public delegate void LocalDataChannelReadyDelegate(int id);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnLocalDataChannelReady(
+ int peerConnectionId, LocalDataChannelReadyInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void DataFromDataChannelReadyInternalDelegate(string s);
+ public delegate void DataFromDataChannelReadyDelegate(int id, string s);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnDataFromDataChannelReady(
+ int peerConnectionId, DataFromDataChannelReadyInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void FailureMessageInternalDelegate(string msg);
+ public delegate void FailureMessageDelegate(int id, string msg);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnFailure(int peerConnectionId,
+ FailureMessageInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void AudioBusReadyInternalDelegate(IntPtr data, int bitsPerSample,
+ int sampleRate, int numberOfChannels, int numberOfFrames);
+ public delegate void AudioBusReadyDelegate(int id, IntPtr data, int bitsPerSample,
+ int sampleRate, int numberOfChannels, int numberOfFrames);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnAudioBusReady(int peerConnectionId,
+ AudioBusReadyInternalDelegate callback);
+
+ // Video callbacks.
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void I420FrameReadyInternalDelegate(
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height);
+ public delegate void I420FrameReadyDelegate(int id,
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnLocalI420FrameReady(int peerConnectionId,
+ I420FrameReadyInternalDelegate callback);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnRemoteI420FrameReady(int peerConnectionId,
+ I420FrameReadyInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void LocalSdpReadytoSendInternalDelegate(string type, string sdp);
+ public delegate void LocalSdpReadytoSendDelegate(int id, string type, string sdp);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnLocalSdpReadytoSend(int peerConnectionId,
+ LocalSdpReadytoSendInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void IceCandidateReadytoSendInternalDelegate(
+ string candidate, int sdpMlineIndex, string sdpMid);
+ public delegate void IceCandidateReadytoSendDelegate(
+ int id, string candidate, int sdpMlineIndex, string sdpMid);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnIceCandidateReadytoSend(
+ int peerConnectionId, IceCandidateReadytoSendInternalDelegate callback);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool SetRemoteDescription(int peerConnectionId, string type, string sdp);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool AddIceCandidate(int peerConnectionId, string sdp,
+ int sdpMlineindex, string sdpMid);
+
+ public PeerConnectionM(List<string> turnUrls, string username, string credential) {
+ string[] urls = turnUrls != null ? turnUrls.ToArray() : null;
+ int length = turnUrls != null ? turnUrls.Count : 0;
+ mPeerConnectionId = CreatePeerConnection(urls, length, username, credential);
+ RegisterCallbacks();
+ }
+
+ public void ClosePeerConnection() {
+ ClosePeerConnection(mPeerConnectionId);
+ mPeerConnectionId = -1;
+ }
+
+ // Return -1 if Peerconnection is not available.
+ public int GetUniqueId() {
+ return mPeerConnectionId;
+ }
+
+ public void AddStream(bool audioOnly) {
+ AddStream(mPeerConnectionId, audioOnly);
+ }
+
+ public void AddDataChannel() {
+ AddDataChannel(mPeerConnectionId);
+ }
+
+ public void CreateOffer() {
+ CreateOffer(mPeerConnectionId);
+ }
+
+ public void CreateAnswer() {
+ CreateAnswer(mPeerConnectionId);
+ }
+
+ public void SendDataViaDataChannel(string data) {
+ SendDataViaDataChannel(mPeerConnectionId, data);
+ }
+
+ public void SetAudioControl(bool isMute, bool isRecord) {
+ SetAudioControl(mPeerConnectionId, isMute, isRecord);
+ }
+
+ public void SetRemoteDescription(string type, string sdp) {
+ SetRemoteDescription(mPeerConnectionId, type, sdp);
+ }
+
+ public void AddIceCandidate(string candidate, int sdpMlineindex, string sdpMid) {
+ AddIceCandidate(mPeerConnectionId, candidate, sdpMlineindex, sdpMid);
+ }
+
+ private void RegisterCallbacks() {
+ localDataChannelReadyDelegate = new LocalDataChannelReadyInternalDelegate(
+ RaiseLocalDataChannelReady);
+ RegisterOnLocalDataChannelReady(mPeerConnectionId, localDataChannelReadyDelegate);
+
+ dataFromDataChannelReadyDelegate = new DataFromDataChannelReadyInternalDelegate(
+ RaiseDataFromDataChannelReady);
+ RegisterOnDataFromDataChannelReady(mPeerConnectionId, dataFromDataChannelReadyDelegate);
+
+ failureMessageDelegate = new FailureMessageInternalDelegate(RaiseFailureMessage);
+ RegisterOnFailure(mPeerConnectionId, failureMessageDelegate);
+
+ audioBusReadyDelegate = new AudioBusReadyInternalDelegate(RaiseAudioBusReady);
+ RegisterOnAudioBusReady(mPeerConnectionId, audioBusReadyDelegate);
+
+ localI420FrameReadyDelegate = new I420FrameReadyInternalDelegate(
+ RaiseLocalVideoFrameReady);
+ RegisterOnLocalI420FrameReady(mPeerConnectionId, localI420FrameReadyDelegate);
+
+ remoteI420FrameReadyDelegate = new I420FrameReadyInternalDelegate(
+ RaiseRemoteVideoFrameReady);
+ RegisterOnRemoteI420FrameReady(mPeerConnectionId, remoteI420FrameReadyDelegate);
+
+ localSdpReadytoSendDelegate = new LocalSdpReadytoSendInternalDelegate(
+ RaiseLocalSdpReadytoSend);
+ RegisterOnLocalSdpReadytoSend(mPeerConnectionId, localSdpReadytoSendDelegate);
+
+ iceCandidateReadytoSendDelegate =
+ new IceCandidateReadytoSendInternalDelegate(RaiseIceCandidateReadytoSend);
+ RegisterOnIceCandidateReadytoSend(
+ mPeerConnectionId, iceCandidateReadytoSendDelegate);
+ }
+
+ private void RaiseLocalDataChannelReady() {
+ if (OnLocalDataChannelReady != null)
+ OnLocalDataChannelReady(mPeerConnectionId);
+ }
+
+ private void RaiseDataFromDataChannelReady(string data) {
+ if (OnDataFromDataChannelReady != null)
+ OnDataFromDataChannelReady(mPeerConnectionId, data);
+ }
+
+ private void RaiseFailureMessage(string msg) {
+ if (OnFailureMessage != null)
+ OnFailureMessage(mPeerConnectionId, msg);
+ }
+
+ private void RaiseAudioBusReady(IntPtr data, int bitsPerSample,
+ int sampleRate, int numberOfChannels, int numberOfFrames) {
+ if (OnAudioBusReady != null)
+ OnAudioBusReady(mPeerConnectionId, data, bitsPerSample, sampleRate,
+ numberOfChannels, numberOfFrames);
+ }
+
+ private void RaiseLocalVideoFrameReady(
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height) {
+ if (OnLocalVideoFrameReady != null)
+ OnLocalVideoFrameReady(mPeerConnectionId, dataY, dataU, dataV, strideY, strideU, strideV,
+ width, height);
+ }
+
+ private void RaiseRemoteVideoFrameReady(
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height) {
+ if (OnRemoteVideoFrameReady != null)
+ OnRemoteVideoFrameReady(mPeerConnectionId, dataY, dataU, dataV, strideY, strideU, strideV,
+ width, height);
+ }
+
+
+ private void RaiseLocalSdpReadytoSend(string type, string sdp) {
+ if (OnLocalSdpReadytoSend != null)
+ OnLocalSdpReadytoSend(mPeerConnectionId, type, sdp);
+ }
+
+ private void RaiseIceCandidateReadytoSend(string candidate, int sdpMlineIndex, string sdpMid) {
+ if (OnIceCandidateReadytoSend != null)
+ OnIceCandidateReadytoSend(mPeerConnectionId, candidate, sdpMlineIndex, sdpMid);
+ }
+
+ public void AddQueuedIceCandidate(List<IceCandidate> iceCandidateQueue) {
+ if (iceCandidateQueue != null) {
+ foreach (IceCandidate ic in iceCandidateQueue) {
+ AddIceCandidate(mPeerConnectionId, ic.Candidate, ic.SdpMlineIndex, ic.SdpMid);
+ }
+ }
+ }
+
+ private LocalDataChannelReadyInternalDelegate localDataChannelReadyDelegate = null;
+ public event LocalDataChannelReadyDelegate OnLocalDataChannelReady;
+
+ private DataFromDataChannelReadyInternalDelegate dataFromDataChannelReadyDelegate = null;
+ public event DataFromDataChannelReadyDelegate OnDataFromDataChannelReady;
+
+ private FailureMessageInternalDelegate failureMessageDelegate = null;
+ public event FailureMessageDelegate OnFailureMessage;
+
+ private AudioBusReadyInternalDelegate audioBusReadyDelegate = null;
+ public event AudioBusReadyDelegate OnAudioBusReady;
+
+ private I420FrameReadyInternalDelegate localI420FrameReadyDelegate = null;
+ public event I420FrameReadyDelegate OnLocalVideoFrameReady;
+
+ private I420FrameReadyInternalDelegate remoteI420FrameReadyDelegate = null;
+ public event I420FrameReadyDelegate OnRemoteVideoFrameReady;
+
+ private LocalSdpReadytoSendInternalDelegate localSdpReadytoSendDelegate = null;
+ public event LocalSdpReadytoSendDelegate OnLocalSdpReadytoSend;
+
+ private IceCandidateReadytoSendInternalDelegate iceCandidateReadytoSendDelegate = null;
+ public event IceCandidateReadytoSendDelegate OnIceCandidateReadytoSend;
+
+ private int mPeerConnectionId = -1;
+ }
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc
new file mode 100644
index 0000000000..00ca772e76
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "examples/unityplugin/class_reference_holder.h"
+
+#include <utility>
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace unity_plugin {
+
+// ClassReferenceHolder holds global reference to Java classes in app/webrtc.
+class ClassReferenceHolder {
+ public:
+ explicit ClassReferenceHolder(JNIEnv* jni);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+ jclass GetClass(const std::string& name);
+
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+ private:
+ std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+ RTC_CHECK(g_class_reference_holder == nullptr);
+ g_class_reference_holder = new ClassReferenceHolder(webrtc::jni::GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+ g_class_reference_holder->FreeReferences(
+ webrtc::jni::AttachCurrentThreadIfNeeded());
+ delete g_class_reference_holder;
+ g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+ LoadClass(jni, "org/webrtc/UnityUtility");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+ RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+ for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+ it != classes_.end(); ++it) {
+ jni->DeleteGlobalRef(it->second);
+ }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+ std::map<std::string, jclass>::iterator it = classes_.find(name);
+ RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+ RTC_CHECK(localRef) << name;
+ jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+ RTC_CHECK(globalRef) << name;
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+ return g_class_reference_holder->GetClass(name);
+}
+
+} // namespace unity_plugin
diff --git a/third_party/libwebrtc/examples/unityplugin/class_reference_holder.h b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.h
new file mode 100644
index 0000000000..884d471ceb
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This is a supplement of webrtc::jni::ClassReferenceHolder.
+// The purpose of this ClassReferenceHolder is to load the example
+// specific java class into JNI c++ side, so that our c++ code can
+// call those java functions.
+
+#ifndef EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_
+#define EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_
+
+#include <jni.h>
+
+#include <map>
+#include <string>
+#include <vector>
+
+namespace unity_plugin {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_UnLoad.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name);
+
+} // namespace unity_plugin
+
+#endif // EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_
diff --git a/third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java b/third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
new file mode 100644
index 0000000000..bd8bbfa449
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import androidx.annotation.Nullable;
+
+public class UnityUtility {
+ private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
+
+ public static SurfaceTextureHelper LoadSurfaceTextureHelper() {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, null);
+ return surfaceTextureHelper;
+ }
+
+ private static boolean useCamera2() {
+ return Camera2Enumerator.isSupported(ContextUtils.getApplicationContext());
+ }
+
+ private static @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
+ final String[] deviceNames = enumerator.getDeviceNames();
+
+ for (String deviceName : deviceNames) {
+ if (enumerator.isFrontFacing(deviceName)) {
+ VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+ if (videoCapturer != null) {
+ return videoCapturer;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ public static VideoCapturer LinkCamera(
+ long nativeTrackSource, SurfaceTextureHelper surfaceTextureHelper) {
+ VideoCapturer capturer =
+ createCameraCapturer(new Camera2Enumerator(ContextUtils.getApplicationContext()));
+
+ VideoSource videoSource = new VideoSource(nativeTrackSource);
+
+ capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
+ videoSource.getCapturerObserver());
+
+ capturer.startCapture(720, 480, 30);
+ return capturer;
+ }
+
+ public static void StopCamera(VideoCapturer camera) throws InterruptedException {
+ camera.stopCapture();
+ camera.dispose();
+ }
+
+ public static void InitializePeerConncectionFactory(Context context) throws InterruptedException {
+ PeerConnectionFactory.initialize(
+ PeerConnectionFactory.InitializationOptions.builder(context).createInitializationOptions());
+ }
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/jni_onload.cc b/third_party/libwebrtc/examples/unityplugin/jni_onload.cc
new file mode 100644
index 0000000000..b9c92d5ef4
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/jni_onload.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "examples/unityplugin/class_reference_holder.h"
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ webrtc::InitClassLoader(GetEnv());
+ unity_plugin::LoadGlobalClassReferenceHolder();
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
+ unity_plugin::FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc
new file mode 100644
index 0000000000..de49d5cd07
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc
@@ -0,0 +1,586 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/unityplugin/simple_peer_connection.h"
+
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/multiplex_codec_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/video_capture/video_capture_factory.h"
+#include "pc/video_track_source.h"
+#include "test/vcm_capturer.h"
+
+#if defined(WEBRTC_ANDROID)
+#include "examples/unityplugin/class_reference_holder.h"
+#include "modules/utility/include/helpers_android.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#endif
+
+// Names used for media stream ids.
+const char kAudioLabel[] = "audio_label";
+const char kVideoLabel[] = "video_label";
+const char kStreamId[] = "stream_id";
+
+namespace {
+static int g_peer_count = 0;
+static std::unique_ptr<rtc::Thread> g_worker_thread;
+static std::unique_ptr<rtc::Thread> g_signaling_thread;
+static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ g_peer_connection_factory;
+#if defined(WEBRTC_ANDROID)
+// Android case: the video track does not own the capturer, and it
+// relies on the app to dispose the capturer when the peerconnection
+// shuts down.
+static jobject g_camera = nullptr;
+#else
+class CapturerTrackSource : public webrtc::VideoTrackSource {
+ public:
+ static rtc::scoped_refptr<CapturerTrackSource> Create() {
+ const size_t kWidth = 640;
+ const size_t kHeight = 480;
+ const size_t kFps = 30;
+ const size_t kDeviceIndex = 0;
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer = absl::WrapUnique(
+ webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, kDeviceIndex));
+ if (!capturer) {
+ return nullptr;
+ }
+ return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
+ }
+
+ protected:
+ explicit CapturerTrackSource(
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer)
+ : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {}
+
+ private:
+ rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
+ return capturer_.get();
+ }
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer_;
+};
+
+#endif
+
+std::string GetEnvVarOrDefault(const char* env_var_name,
+ const char* default_value) {
+ std::string value;
+ const char* env_var = getenv(env_var_name);
+ if (env_var)
+ value = env_var;
+
+ if (value.empty())
+ value = default_value;
+
+ return value;
+}
+
+std::string GetPeerConnectionString() {
+ return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302");
+}
+
+class DummySetSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ static rtc::scoped_refptr<DummySetSessionDescriptionObserver> Create() {
+ return rtc::make_ref_counted<DummySetSessionDescriptionObserver>();
+ }
+ virtual void OnSuccess() { RTC_LOG(LS_INFO) << __FUNCTION__; }
+ virtual void OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << ToString(error.type()) << ": "
+ << error.message();
+ }
+
+ protected:
+ DummySetSessionDescriptionObserver() {}
+ ~DummySetSessionDescriptionObserver() {}
+};
+
+} // namespace
+
+bool SimplePeerConnection::InitializePeerConnection(const char** turn_urls,
+ const int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool is_receiver) {
+ RTC_DCHECK(peer_connection_.get() == nullptr);
+
+ if (g_peer_connection_factory == nullptr) {
+ g_worker_thread = rtc::Thread::Create();
+ g_worker_thread->Start();
+ g_signaling_thread = rtc::Thread::Create();
+ g_signaling_thread->Start();
+
+ g_peer_connection_factory = webrtc::CreatePeerConnectionFactory(
+ g_worker_thread.get(), g_worker_thread.get(), g_signaling_thread.get(),
+ nullptr, webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ std::unique_ptr<webrtc::VideoEncoderFactory>(
+ new webrtc::MultiplexEncoderFactory(
+ std::make_unique<webrtc::InternalEncoderFactory>())),
+ std::unique_ptr<webrtc::VideoDecoderFactory>(
+ new webrtc::MultiplexDecoderFactory(
+ std::make_unique<webrtc::InternalDecoderFactory>())),
+ nullptr, nullptr);
+ }
+ if (!g_peer_connection_factory.get()) {
+ DeletePeerConnection();
+ return false;
+ }
+
+ g_peer_count++;
+ if (!CreatePeerConnection(turn_urls, no_of_urls, username, credential)) {
+ DeletePeerConnection();
+ return false;
+ }
+
+ mandatory_receive_ = is_receiver;
+ return peer_connection_.get() != nullptr;
+}
+
+bool SimplePeerConnection::CreatePeerConnection(const char** turn_urls,
+ const int no_of_urls,
+ const char* username,
+ const char* credential) {
+ RTC_DCHECK(g_peer_connection_factory.get() != nullptr);
+ RTC_DCHECK(peer_connection_.get() == nullptr);
+
+ local_video_observer_.reset(new VideoObserver());
+ remote_video_observer_.reset(new VideoObserver());
+
+ // Add the turn server.
+ if (turn_urls != nullptr) {
+ if (no_of_urls > 0) {
+ webrtc::PeerConnectionInterface::IceServer turn_server;
+ for (int i = 0; i < no_of_urls; i++) {
+ std::string url(turn_urls[i]);
+ if (url.length() > 0)
+ turn_server.urls.push_back(turn_urls[i]);
+ }
+
+ std::string user_name(username);
+ if (user_name.length() > 0)
+ turn_server.username = username;
+
+ std::string password(credential);
+ if (password.length() > 0)
+ turn_server.password = credential;
+
+ config_.servers.push_back(turn_server);
+ }
+ }
+
+ // Add the stun server.
+ webrtc::PeerConnectionInterface::IceServer stun_server;
+ stun_server.uri = GetPeerConnectionString();
+ config_.servers.push_back(stun_server);
+
+ auto result = g_peer_connection_factory->CreatePeerConnectionOrError(
+ config_, webrtc::PeerConnectionDependencies(this));
+ if (!result.ok()) {
+ peer_connection_ = nullptr;
+ return false;
+ }
+ peer_connection_ = result.MoveValue();
+ return true;
+}
+
+void SimplePeerConnection::DeletePeerConnection() {
+ g_peer_count--;
+
+#if defined(WEBRTC_ANDROID)
+ if (g_camera) {
+ JNIEnv* env = webrtc::jni::GetEnv();
+ jclass pc_factory_class =
+ unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
+ jmethodID stop_camera_method = webrtc::GetStaticMethodID(
+ env, pc_factory_class, "StopCamera", "(Lorg/webrtc/VideoCapturer;)V");
+
+ env->CallStaticVoidMethod(pc_factory_class, stop_camera_method, g_camera);
+ CHECK_EXCEPTION(env);
+
+ g_camera = nullptr;
+ }
+#endif
+
+ CloseDataChannel();
+ peer_connection_ = nullptr;
+ active_streams_.clear();
+
+ if (g_peer_count == 0) {
+ g_peer_connection_factory = nullptr;
+ g_signaling_thread.reset();
+ g_worker_thread.reset();
+ }
+}
+
+bool SimplePeerConnection::CreateOffer() {
+ if (!peer_connection_.get())
+ return false;
+
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ if (mandatory_receive_) {
+ options.offer_to_receive_audio = true;
+ options.offer_to_receive_video = true;
+ }
+ peer_connection_->CreateOffer(this, options);
+ return true;
+}
+
+bool SimplePeerConnection::CreateAnswer() {
+ if (!peer_connection_.get())
+ return false;
+
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ if (mandatory_receive_) {
+ options.offer_to_receive_audio = true;
+ options.offer_to_receive_video = true;
+ }
+ peer_connection_->CreateAnswer(this, options);
+ return true;
+}
+
+void SimplePeerConnection::OnSuccess(
+ webrtc::SessionDescriptionInterface* desc) {
+ peer_connection_->SetLocalDescription(
+ DummySetSessionDescriptionObserver::Create().get(), desc);
+
+ std::string sdp;
+ desc->ToString(&sdp);
+
+ if (OnLocalSdpReady)
+ OnLocalSdpReady(desc->type().c_str(), sdp.c_str());
+}
+
+void SimplePeerConnection::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_ERROR) << ToString(error.type()) << ": " << error.message();
+
+ // TODO(hta): include error.type in the message
+ if (OnFailureMessage)
+ OnFailureMessage(error.message());
+}
+
+void SimplePeerConnection::OnIceCandidate(
+ const webrtc::IceCandidateInterface* candidate) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index();
+
+ std::string sdp;
+ if (!candidate->ToString(&sdp)) {
+ RTC_LOG(LS_ERROR) << "Failed to serialize candidate";
+ return;
+ }
+
+ if (OnIceCandidateReady)
+ OnIceCandidateReady(sdp.c_str(), candidate->sdp_mline_index(),
+ candidate->sdp_mid().c_str());
+}
+
+void SimplePeerConnection::RegisterOnLocalI420FrameReady(
+ I420FRAMEREADY_CALLBACK callback) {
+ if (local_video_observer_)
+ local_video_observer_->SetVideoCallback(callback);
+}
+
+void SimplePeerConnection::RegisterOnRemoteI420FrameReady(
+ I420FRAMEREADY_CALLBACK callback) {
+ if (remote_video_observer_)
+ remote_video_observer_->SetVideoCallback(callback);
+}
+
+void SimplePeerConnection::RegisterOnLocalDataChannelReady(
+ LOCALDATACHANNELREADY_CALLBACK callback) {
+ OnLocalDataChannelReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnDataFromDataChannelReady(
+ DATAFROMEDATECHANNELREADY_CALLBACK callback) {
+ OnDataFromDataChannelReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnFailure(FAILURE_CALLBACK callback) {
+ OnFailureMessage = callback;
+}
+
+void SimplePeerConnection::RegisterOnAudioBusReady(
+ AUDIOBUSREADY_CALLBACK callback) {
+ OnAudioReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnLocalSdpReadytoSend(
+ LOCALSDPREADYTOSEND_CALLBACK callback) {
+ OnLocalSdpReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnIceCandidateReadytoSend(
+ ICECANDIDATEREADYTOSEND_CALLBACK callback) {
+ OnIceCandidateReady = callback;
+}
+
+bool SimplePeerConnection::SetRemoteDescription(const char* type,
+ const char* sdp) {
+ if (!peer_connection_)
+ return false;
+
+ std::string remote_desc(sdp);
+ std::string desc_type(type);
+ webrtc::SdpParseError error;
+ webrtc::SessionDescriptionInterface* session_description(
+ webrtc::CreateSessionDescription(desc_type, remote_desc, &error));
+ if (!session_description) {
+ RTC_LOG(LS_WARNING) << "Can't parse received session description message. "
+ "SdpParseError was: "
+ << error.description;
+ return false;
+ }
+ RTC_LOG(LS_INFO) << " Received session description :" << remote_desc;
+ peer_connection_->SetRemoteDescription(
+ DummySetSessionDescriptionObserver::Create().get(), session_description);
+
+ return true;
+}
+
+bool SimplePeerConnection::AddIceCandidate(const char* candidate,
+ const int sdp_mlineindex,
+ const char* sdp_mid) {
+ if (!peer_connection_)
+ return false;
+
+ webrtc::SdpParseError error;
+ std::unique_ptr<webrtc::IceCandidateInterface> ice_candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, candidate, &error));
+ if (!ice_candidate.get()) {
+ RTC_LOG(LS_WARNING) << "Can't parse received candidate message. "
+ "SdpParseError was: "
+ << error.description;
+ return false;
+ }
+ if (!peer_connection_->AddIceCandidate(ice_candidate.get())) {
+ RTC_LOG(LS_WARNING) << "Failed to apply the received candidate";
+ return false;
+ }
+ RTC_LOG(LS_INFO) << " Received candidate :" << candidate;
+ return true;
+}
+
+void SimplePeerConnection::SetAudioControl(bool is_mute, bool is_record) {
+ is_mute_audio_ = is_mute;
+ is_record_audio_ = is_record;
+
+ SetAudioControl();
+}
+
+void SimplePeerConnection::SetAudioControl() {
+ if (!remote_stream_)
+ return;
+ webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks();
+ if (tracks.empty())
+ return;
+
+ rtc::scoped_refptr<webrtc::AudioTrackInterface>& audio_track = tracks[0];
+ if (is_record_audio_)
+ audio_track->AddSink(this);
+ else
+ audio_track->RemoveSink(this);
+
+ for (auto& track : tracks) {
+ if (is_mute_audio_)
+ track->set_enabled(false);
+ else
+ track->set_enabled(true);
+ }
+}
+
+void SimplePeerConnection::OnAddStream(
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << stream->id();
+ remote_stream_ = stream;
+ if (remote_video_observer_ && !remote_stream_->GetVideoTracks().empty()) {
+ remote_stream_->GetVideoTracks()[0]->AddOrUpdateSink(
+ remote_video_observer_.get(), rtc::VideoSinkWants());
+ }
+ SetAudioControl();
+}
+
+void SimplePeerConnection::AddStreams(bool audio_only) {
+ if (active_streams_.find(kStreamId) != active_streams_.end())
+ return; // Already added.
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ g_peer_connection_factory->CreateLocalMediaStream(kStreamId);
+
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ g_peer_connection_factory->CreateAudioTrack(
+ kAudioLabel,
+ g_peer_connection_factory->CreateAudioSource(cricket::AudioOptions())
+ .get()));
+ stream->AddTrack(audio_track);
+
+ if (!audio_only) {
+#if defined(WEBRTC_ANDROID)
+ JNIEnv* env = webrtc::jni::GetEnv();
+ jclass pc_factory_class =
+ unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
+ jmethodID load_texture_helper_method = webrtc::GetStaticMethodID(
+ env, pc_factory_class, "LoadSurfaceTextureHelper",
+ "()Lorg/webrtc/SurfaceTextureHelper;");
+ jobject texture_helper = env->CallStaticObjectMethod(
+ pc_factory_class, load_texture_helper_method);
+ CHECK_EXCEPTION(env);
+ RTC_DCHECK(texture_helper != nullptr)
+ << "Cannot get the Surface Texture Helper.";
+
+ auto source = rtc::make_ref_counted<webrtc::jni::AndroidVideoTrackSource>(
+ g_signaling_thread.get(), env, /*is_screencast=*/false,
+ /*align_timestamps=*/true);
+
+    // Link with the VideoCapturer (camera).
+ jmethodID link_camera_method = webrtc::GetStaticMethodID(
+ env, pc_factory_class, "LinkCamera",
+ "(JLorg/webrtc/SurfaceTextureHelper;)Lorg/webrtc/VideoCapturer;");
+ jobject camera_tmp =
+ env->CallStaticObjectMethod(pc_factory_class, link_camera_method,
+ (jlong)source.get(), texture_helper);
+ CHECK_EXCEPTION(env);
+ g_camera = (jobject)env->NewGlobalRef(camera_tmp);
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ g_peer_connection_factory->CreateVideoTrack(source, kVideoLabel));
+ stream->AddTrack(video_track);
+#else
+ rtc::scoped_refptr<CapturerTrackSource> video_device =
+ CapturerTrackSource::Create();
+ if (video_device) {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ g_peer_connection_factory->CreateVideoTrack(video_device,
+ kVideoLabel));
+
+ stream->AddTrack(video_track);
+ }
+#endif
+ if (local_video_observer_ && !stream->GetVideoTracks().empty()) {
+ stream->GetVideoTracks()[0]->AddOrUpdateSink(local_video_observer_.get(),
+ rtc::VideoSinkWants());
+ }
+ }
+
+ if (!peer_connection_->AddStream(stream.get())) {
+ RTC_LOG(LS_ERROR) << "Adding stream to PeerConnection failed";
+ }
+
+ typedef std::pair<std::string,
+ rtc::scoped_refptr<webrtc::MediaStreamInterface>>
+ MediaStreamPair;
+ active_streams_.insert(MediaStreamPair(stream->id(), stream));
+}
+
+bool SimplePeerConnection::CreateDataChannel() {
+ struct webrtc::DataChannelInit init;
+ init.ordered = true;
+ init.reliable = true;
+ auto result = peer_connection_->CreateDataChannelOrError("Hello", &init);
+ if (result.ok()) {
+ data_channel_ = result.MoveValue();
+ data_channel_->RegisterObserver(this);
+ RTC_LOG(LS_INFO) << "Succeeds to create data channel";
+ return true;
+ } else {
+ RTC_LOG(LS_INFO) << "Fails to create data channel";
+ return false;
+ }
+}
+
+void SimplePeerConnection::CloseDataChannel() {
+ if (data_channel_.get()) {
+ data_channel_->UnregisterObserver();
+ data_channel_->Close();
+ }
+ data_channel_ = nullptr;
+}
+
+bool SimplePeerConnection::SendDataViaDataChannel(const std::string& data) {
+ if (!data_channel_.get()) {
+ RTC_LOG(LS_INFO) << "Data channel is not established";
+ return false;
+ }
+ webrtc::DataBuffer buffer(data);
+ data_channel_->Send(buffer);
+ return true;
+}
+
+// Peerconnection observer
+void SimplePeerConnection::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
+ channel->RegisterObserver(this);
+}
+
+void SimplePeerConnection::OnStateChange() {
+ if (data_channel_) {
+ webrtc::DataChannelInterface::DataState state = data_channel_->state();
+ if (state == webrtc::DataChannelInterface::kOpen) {
+ if (OnLocalDataChannelReady)
+ OnLocalDataChannelReady();
+ RTC_LOG(LS_INFO) << "Data channel is open";
+ }
+ }
+}
+
+// A data buffer was successfully received.
+void SimplePeerConnection::OnMessage(const webrtc::DataBuffer& buffer) {
+ size_t size = buffer.data.size();
+ char* msg = new char[size + 1];
+ memcpy(msg, buffer.data.data(), size);
+ msg[size] = 0;
+ if (OnDataFromDataChannelReady)
+ OnDataFromDataChannelReady(msg);
+ delete[] msg;
+}
+
+// AudioTrackSinkInterface implementation.
+void SimplePeerConnection::OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) {
+ if (OnAudioReady)
+ OnAudioReady(audio_data, bits_per_sample, sample_rate,
+ static_cast<int>(number_of_channels),
+ static_cast<int>(number_of_frames));
+}
+
+std::vector<uint32_t> SimplePeerConnection::GetRemoteAudioTrackSsrcs() {
+ std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> receivers =
+ peer_connection_->GetReceivers();
+
+ std::vector<uint32_t> ssrcs;
+ for (const auto& receiver : receivers) {
+ if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO)
+ continue;
+
+ std::vector<webrtc::RtpEncodingParameters> params =
+ receiver->GetParameters().encodings;
+
+ for (const auto& param : params) {
+ uint32_t ssrc = param.ssrc.value_or(0);
+ if (ssrc > 0)
+ ssrcs.push_back(ssrc);
+ }
+ }
+
+ return ssrcs;
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h
new file mode 100644
index 0000000000..de652ef118
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_
+#define EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "examples/unityplugin/unity_plugin_apis.h"
+#include "examples/unityplugin/video_observer.h"
+
+class SimplePeerConnection : public webrtc::PeerConnectionObserver,
+ public webrtc::CreateSessionDescriptionObserver,
+ public webrtc::DataChannelObserver,
+ public webrtc::AudioTrackSinkInterface {
+ public:
+ SimplePeerConnection() {}
+ ~SimplePeerConnection() {}
+
+ bool InitializePeerConnection(const char** turn_urls,
+ int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool is_receiver);
+ void DeletePeerConnection();
+ void AddStreams(bool audio_only);
+ bool CreateDataChannel();
+ bool CreateOffer();
+ bool CreateAnswer();
+ bool SendDataViaDataChannel(const std::string& data);
+ void SetAudioControl(bool is_mute, bool is_record);
+
+ // Register callback functions.
+ void RegisterOnLocalI420FrameReady(I420FRAMEREADY_CALLBACK callback);
+ void RegisterOnRemoteI420FrameReady(I420FRAMEREADY_CALLBACK callback);
+ void RegisterOnLocalDataChannelReady(LOCALDATACHANNELREADY_CALLBACK callback);
+ void RegisterOnDataFromDataChannelReady(
+ DATAFROMEDATECHANNELREADY_CALLBACK callback);
+ void RegisterOnFailure(FAILURE_CALLBACK callback);
+ void RegisterOnAudioBusReady(AUDIOBUSREADY_CALLBACK callback);
+ void RegisterOnLocalSdpReadytoSend(LOCALSDPREADYTOSEND_CALLBACK callback);
+ void RegisterOnIceCandidateReadytoSend(
+ ICECANDIDATEREADYTOSEND_CALLBACK callback);
+ bool SetRemoteDescription(const char* type, const char* sdp);
+ bool AddIceCandidate(const char* sdp,
+ int sdp_mlineindex,
+ const char* sdp_mid);
+
+ protected:
+  // Create a peerconnection and add the turn server info to the configuration.
+ bool CreatePeerConnection(const char** turn_urls,
+ int no_of_urls,
+ const char* username,
+ const char* credential);
+ void CloseDataChannel();
+ void SetAudioControl();
+
+ // PeerConnectionObserver implementation.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override {}
+ void OnAddStream(
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;
+ void OnRemoveStream(
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override {}
+ void OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel) override;
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {}
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+ void OnIceConnectionReceivingChange(bool receiving) override {}
+
+ // CreateSessionDescriptionObserver implementation.
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError error) override;
+
+ // DataChannelObserver implementation.
+ void OnStateChange() override;
+ void OnMessage(const webrtc::DataBuffer& buffer) override;
+
+ // AudioTrackSinkInterface implementation.
+ void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) override;
+
+ // Get remote audio tracks ssrcs.
+ std::vector<uint32_t> GetRemoteAudioTrackSsrcs();
+
+ private:
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel_;
+ std::map<std::string, rtc::scoped_refptr<webrtc::MediaStreamInterface> >
+ active_streams_;
+
+ std::unique_ptr<VideoObserver> local_video_observer_;
+ std::unique_ptr<VideoObserver> remote_video_observer_;
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> remote_stream_ = nullptr;
+ webrtc::PeerConnectionInterface::RTCConfiguration config_;
+
+ LOCALDATACHANNELREADY_CALLBACK OnLocalDataChannelReady = nullptr;
+ DATAFROMEDATECHANNELREADY_CALLBACK OnDataFromDataChannelReady = nullptr;
+ FAILURE_CALLBACK OnFailureMessage = nullptr;
+ AUDIOBUSREADY_CALLBACK OnAudioReady = nullptr;
+
+ LOCALSDPREADYTOSEND_CALLBACK OnLocalSdpReady = nullptr;
+ ICECANDIDATEREADYTOSEND_CALLBACK OnIceCandidateReady = nullptr;
+
+ bool is_mute_audio_ = false;
+ bool is_record_audio_ = false;
+ bool mandatory_receive_ = false;
+
+ // disallow copy-and-assign
+ SimplePeerConnection(const SimplePeerConnection&) = delete;
+ SimplePeerConnection& operator=(const SimplePeerConnection&) = delete;
+};
+
+#endif // EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_
diff --git a/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc
new file mode 100644
index 0000000000..6e34d7e1e0
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/unityplugin/unity_plugin_apis.h"
+
+#include <map>
+#include <string>
+
+#include "examples/unityplugin/simple_peer_connection.h"
+
+namespace {
+static int g_peer_connection_id = 1;
+static std::map<int, rtc::scoped_refptr<SimplePeerConnection>>
+ g_peer_connection_map;
+} // namespace
+
+int CreatePeerConnection(const char** turn_urls,
+ const int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool mandatory_receive_video) {
+ g_peer_connection_map[g_peer_connection_id] =
+ rtc::make_ref_counted<SimplePeerConnection>();
+
+ if (!g_peer_connection_map[g_peer_connection_id]->InitializePeerConnection(
+ turn_urls, no_of_urls, username, credential, mandatory_receive_video))
+ return -1;
+
+ return g_peer_connection_id++;
+}
+
+bool ClosePeerConnection(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->DeletePeerConnection();
+ g_peer_connection_map.erase(peer_connection_id);
+ return true;
+}
+
+bool AddStream(int peer_connection_id, bool audio_only) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->AddStreams(audio_only);
+ return true;
+}
+
+bool AddDataChannel(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->CreateDataChannel();
+}
+
+bool CreateOffer(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->CreateOffer();
+}
+
+bool CreateAnswer(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->CreateAnswer();
+}
+
+bool SendDataViaDataChannel(int peer_connection_id, const char* data) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ std::string s(data);
+ g_peer_connection_map[peer_connection_id]->SendDataViaDataChannel(s);
+
+ return true;
+}
+
+bool SetAudioControl(int peer_connection_id, bool is_mute, bool is_record) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->SetAudioControl(is_mute,
+ is_record);
+ return true;
+}
+
+bool SetRemoteDescription(int peer_connection_id,
+ const char* type,
+ const char* sdp) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->SetRemoteDescription(type,
+ sdp);
+}
+
+bool AddIceCandidate(const int peer_connection_id,
+ const char* candidate,
+ const int sdp_mlineindex,
+ const char* sdp_mid) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->AddIceCandidate(
+ candidate, sdp_mlineindex, sdp_mid);
+}
+
+// Register callback functions.
+bool RegisterOnLocalI420FrameReady(int peer_connection_id,
+ I420FRAMEREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnLocalI420FrameReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnRemoteI420FrameReady(int peer_connection_id,
+ I420FRAMEREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnRemoteI420FrameReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnLocalDataChannelReady(int peer_connection_id,
+ LOCALDATACHANNELREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnLocalDataChannelReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnDataFromDataChannelReady(
+ int peer_connection_id,
+ DATAFROMEDATECHANNELREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnDataFromDataChannelReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnFailure(int peer_connection_id, FAILURE_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnFailure(callback);
+ return true;
+}
+
+bool RegisterOnAudioBusReady(int peer_connection_id,
+ AUDIOBUSREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnAudioBusReady(callback);
+ return true;
+}
+
+// Signaling channel related functions.
+bool RegisterOnLocalSdpReadytoSend(int peer_connection_id,
+ LOCALSDPREADYTOSEND_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnLocalSdpReadytoSend(
+ callback);
+ return true;
+}
+
+bool RegisterOnIceCandidateReadytoSend(
+ int peer_connection_id,
+ ICECANDIDATEREADYTOSEND_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnIceCandidateReadytoSend(
+ callback);
+ return true;
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h
new file mode 100644
index 0000000000..9790dc57b9
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file provides an example of unity native plugin APIs.
+
+#ifndef EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_
+#define EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_
+
+#include <stdint.h>
+
+// Definitions of callback functions.
+typedef void (*I420FRAMEREADY_CALLBACK)(const uint8_t* data_y,
+                                        const uint8_t* data_u,
+                                        const uint8_t* data_v,
+                                        const uint8_t* data_a,
+                                        int stride_y,
+                                        int stride_u,
+                                        int stride_v,
+                                        int stride_a,
+                                        uint32_t width,
+                                        uint32_t height);
+typedef void (*LOCALDATACHANNELREADY_CALLBACK)();
+typedef void (*DATAFROMEDATECHANNELREADY_CALLBACK)(const char* msg);
+typedef void (*FAILURE_CALLBACK)(const char* msg);
+typedef void (*LOCALSDPREADYTOSEND_CALLBACK)(const char* type, const char* sdp);
+typedef void (*ICECANDIDATEREADYTOSEND_CALLBACK)(const char* candidate,
+                                                 int sdp_mline_index,
+                                                 const char* sdp_mid);
+typedef void (*AUDIOBUSREADY_CALLBACK)(const void* audio_data,
+                                       int bits_per_sample,
+                                       int sample_rate,
+                                       int number_of_channels,
+                                       int number_of_frames);
+
+#if defined(WEBRTC_WIN)
+#define WEBRTC_PLUGIN_API __declspec(dllexport)
+#elif defined(WEBRTC_ANDROID)
+#define WEBRTC_PLUGIN_API __attribute__((visibility("default")))
+#else
+// Other platforms: no export decoration, so the header still compiles.
+#define WEBRTC_PLUGIN_API
+#endif
+extern "C" {
+// Create a peerconnection and return a unique peer connection id.
+WEBRTC_PLUGIN_API int CreatePeerConnection(const char** turn_urls,
+                                           int no_of_urls,
+                                           const char* username,
+                                           const char* credential,
+                                           bool mandatory_receive_video);
+// Close a peerconnection.
+WEBRTC_PLUGIN_API bool ClosePeerConnection(int peer_connection_id);
+// Add an audio stream. If audio_only is true, the stream only has an audio
+// track and no video track.
+WEBRTC_PLUGIN_API bool AddStream(int peer_connection_id, bool audio_only);
+// Add a data channel to peer connection.
+WEBRTC_PLUGIN_API bool AddDataChannel(int peer_connection_id);
+// Create a peer connection offer.
+WEBRTC_PLUGIN_API bool CreateOffer(int peer_connection_id);
+// Create a peer connection answer.
+WEBRTC_PLUGIN_API bool CreateAnswer(int peer_connection_id);
+// Send data through data channel.
+WEBRTC_PLUGIN_API bool SendDataViaDataChannel(int peer_connection_id,
+                                              const char* data);
+// Set audio control. If is_mute=true, no audio will playout. If is_record=true,
+// AUDIOBUSREADY_CALLBACK will be called every 10 ms.
+WEBRTC_PLUGIN_API bool SetAudioControl(int peer_connection_id,
+                                       bool is_mute,
+                                       bool is_record);
+// Set remote sdp.
+WEBRTC_PLUGIN_API bool SetRemoteDescription(int peer_connection_id,
+                                            const char* type,
+                                            const char* sdp);
+// Add ice candidate.
+WEBRTC_PLUGIN_API bool AddIceCandidate(int peer_connection_id,
+                                       const char* candidate,
+                                       int sdp_mlineindex,
+                                       const char* sdp_mid);
+
+// Register callback functions.
+WEBRTC_PLUGIN_API bool RegisterOnLocalI420FrameReady(
+    int peer_connection_id,
+    I420FRAMEREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnRemoteI420FrameReady(
+    int peer_connection_id,
+    I420FRAMEREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnLocalDataChannelReady(
+    int peer_connection_id,
+    LOCALDATACHANNELREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnDataFromDataChannelReady(
+    int peer_connection_id,
+    DATAFROMEDATECHANNELREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnFailure(int peer_connection_id,
+                                         FAILURE_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnAudioBusReady(int peer_connection_id,
+                                               AUDIOBUSREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnLocalSdpReadytoSend(
+    int peer_connection_id,
+    LOCALSDPREADYTOSEND_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnIceCandidateReadytoSend(
+    int peer_connection_id,
+    ICECANDIDATEREADYTOSEND_CALLBACK callback);
+}
+
+#endif  // EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_
diff --git a/third_party/libwebrtc/examples/unityplugin/video_observer.cc b/third_party/libwebrtc/examples/unityplugin/video_observer.cc
new file mode 100644
index 0000000000..7e33b08e27
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/video_observer.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/unityplugin/video_observer.h"
+
+// Stores `callback` as the receiver of converted frame planes delivered by
+// OnFrame(). Takes `mutex` so the pointer is not swapped while OnFrame()
+// holds the same lock.
+void VideoObserver::SetVideoCallback(I420FRAMEREADY_CALLBACK callback) {
+  std::lock_guard<std::mutex> lock(mutex);
+  OnI420FrameReady = callback;
+}
+
+// VideoSinkInterface implementation. Converts each incoming frame to I420
+// (or uses the I420A buffer directly when an alpha plane is present) and
+// passes the raw plane pointers, strides, and dimensions to the registered
+// callback. No-op when no callback has been set.
+void VideoObserver::OnFrame(const webrtc::VideoFrame& frame) {
+  std::unique_lock<std::mutex> lock(mutex);
+  if (!OnI420FrameReady)
+    return;
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+      frame.video_frame_buffer());
+
+  if (buffer->type() != webrtc::VideoFrameBuffer::Type::kI420A) {
+    rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
+        buffer->ToI420();
+    // No alpha plane: pass nullptr for data_a and 0 for stride_a.
+    OnI420FrameReady(i420_buffer->DataY(), i420_buffer->DataU(),
+                     i420_buffer->DataV(), nullptr, i420_buffer->StrideY(),
+                     i420_buffer->StrideU(), i420_buffer->StrideV(), 0,
+                     frame.width(), frame.height());
+
+  } else {
+    // The buffer has alpha channel.
+    const webrtc::I420ABufferInterface* i420a_buffer = buffer->GetI420A();
+
+    OnI420FrameReady(i420a_buffer->DataY(), i420a_buffer->DataU(),
+                     i420a_buffer->DataV(), i420a_buffer->DataA(),
+                     i420a_buffer->StrideY(), i420a_buffer->StrideU(),
+                     i420a_buffer->StrideV(), i420a_buffer->StrideA(),
+                     frame.width(), frame.height());
+  }
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/video_observer.h b/third_party/libwebrtc/examples/unityplugin/video_observer.h
new file mode 100644
index 0000000000..01ccd2191a
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/video_observer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_
+#define EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_
+
+#include <mutex>
+
+#include "api/media_stream_interface.h"
+#include "api/video/video_sink_interface.h"
+#include "examples/unityplugin/unity_plugin_apis.h"
+
+// Adapts WebRTC's video sink interface to the C-style I420FRAMEREADY_CALLBACK
+// used by the Unity plugin API: each delivered frame is forwarded to the
+// registered callback as raw I420/I420A planes.
+class VideoObserver : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+  VideoObserver() {}
+  ~VideoObserver() {}
+  // Sets (or replaces) the callback that receives converted frame planes.
+  void SetVideoCallback(I420FRAMEREADY_CALLBACK callback);
+
+ protected:
+  // VideoSinkInterface implementation
+  void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ private:
+  // Registered frame callback; nullptr until SetVideoCallback() is called.
+  I420FRAMEREADY_CALLBACK OnI420FrameReady = nullptr;
+  // Guards OnI420FrameReady (taken by both SetVideoCallback and OnFrame).
+  std::mutex mutex;
+};
+
+#endif // EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_